From c33675588117aad0b8fabfa5816676d95ba8067e Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Wed, 19 Apr 2023 23:30:52 +0200 Subject: [PATCH 001/320] fix(test/coverage): exclude test files (#18748) Fixes: #18454 --- cli/tests/integration/coverage_tests.rs | 54 +++++++++++++++++++ .../coverage/no_tests_included/expected.out | 1 + .../coverage/no_tests_included/foo.test.js | 6 +++ .../coverage/no_tests_included/foo.test.mts | 6 +++ .../coverage/no_tests_included/foo.test.ts | 6 +++ .../coverage/no_tests_included/foo.ts | 3 ++ cli/tools/coverage/mod.rs | 5 +- cli/tools/test.rs | 2 +- 8 files changed, 81 insertions(+), 2 deletions(-) create mode 100644 cli/tests/testdata/coverage/no_tests_included/expected.out create mode 100644 cli/tests/testdata/coverage/no_tests_included/foo.test.js create mode 100644 cli/tests/testdata/coverage/no_tests_included/foo.test.mts create mode 100644 cli/tests/testdata/coverage/no_tests_included/foo.test.ts create mode 100644 cli/tests/testdata/coverage/no_tests_included/foo.ts diff --git a/cli/tests/integration/coverage_tests.rs b/cli/tests/integration/coverage_tests.rs index 440d6b17ed..79e15d95b2 100644 --- a/cli/tests/integration/coverage_tests.rs +++ b/cli/tests/integration/coverage_tests.rs @@ -26,6 +26,13 @@ fn no_snaps() { no_snaps_included("no_snaps_included", "ts"); } +#[test] +fn no_tests() { + no_tests_included("foo", "mts"); + no_tests_included("foo", "ts"); + no_tests_included("foo", "js"); +} + #[test] fn error_if_invalid_cache() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -277,6 +284,53 @@ fn no_snaps_included(test_name: &str, extension: &str) { output.assert_exit_code(0); } +fn no_tests_included(test_name: &str, extension: &str) { + let context = TestContext::default(); + let tempdir = context.deno_dir(); + let tempdir = tempdir.path().join("cov"); + + let output = context + .new_command() + .args_vec(vec![ + "test".to_string(), + "--quiet".to_string(), + "--allow-read".to_string(), + 
format!("--coverage={}", tempdir.to_str().unwrap()), + format!("coverage/no_tests_included/{test_name}.test.{extension}"), + ]) + .run(); + + output.assert_exit_code(0); + output.skip_output_check(); + + let output = context + .new_command() + .args_vec(vec![ + "coverage".to_string(), + format!("{}/", tempdir.to_str().unwrap()), + ]) + .split_output() + .run(); + + // Verify there's no "Check" being printed + assert!(output.stderr().is_empty()); + + let actual = util::strip_ansi_codes(output.stdout()).to_string(); + + let expected = fs::read_to_string( + util::testdata_path().join("coverage/no_tests_included/expected.out"), + ) + .unwrap(); + + if !util::wildcard_match(&expected, &actual) { + println!("OUTPUT\n{actual}\nOUTPUT"); + println!("EXPECTED\n{expected}\nEXPECTED"); + panic!("pattern match failed"); + } + + output.assert_exit_code(0); +} + #[test] fn no_npm_cache_coverage() { let context = TestContext::default(); diff --git a/cli/tests/testdata/coverage/no_tests_included/expected.out b/cli/tests/testdata/coverage/no_tests_included/expected.out new file mode 100644 index 0000000000..3b2469f2d6 --- /dev/null +++ b/cli/tests/testdata/coverage/no_tests_included/expected.out @@ -0,0 +1 @@ +cover [WILDCARD]/no_tests_included/foo.ts ... 
100.000% (3/3) diff --git a/cli/tests/testdata/coverage/no_tests_included/foo.test.js b/cli/tests/testdata/coverage/no_tests_included/foo.test.js new file mode 100644 index 0000000000..06b13d743b --- /dev/null +++ b/cli/tests/testdata/coverage/no_tests_included/foo.test.js @@ -0,0 +1,6 @@ +import { addNumbers } from "./foo.ts"; +import { assertEquals } from "https://deno.land/std@0.183.0/testing/asserts.ts"; + +Deno.test("addNumbers works", () => { + assertEquals(addNumbers(1, 2), 3); +}); diff --git a/cli/tests/testdata/coverage/no_tests_included/foo.test.mts b/cli/tests/testdata/coverage/no_tests_included/foo.test.mts new file mode 100644 index 0000000000..44aa738722 --- /dev/null +++ b/cli/tests/testdata/coverage/no_tests_included/foo.test.mts @@ -0,0 +1,6 @@ +import { addNumbers } from './foo.ts'; +import { assertEquals } from "https://deno.land/std@0.183.0/testing/asserts.ts"; + +Deno.test("addNumbers works", () => { + assertEquals(addNumbers(1, 2), 3); +}); diff --git a/cli/tests/testdata/coverage/no_tests_included/foo.test.ts b/cli/tests/testdata/coverage/no_tests_included/foo.test.ts new file mode 100644 index 0000000000..06b13d743b --- /dev/null +++ b/cli/tests/testdata/coverage/no_tests_included/foo.test.ts @@ -0,0 +1,6 @@ +import { addNumbers } from "./foo.ts"; +import { assertEquals } from "https://deno.land/std@0.183.0/testing/asserts.ts"; + +Deno.test("addNumbers works", () => { + assertEquals(addNumbers(1, 2), 3); +}); diff --git a/cli/tests/testdata/coverage/no_tests_included/foo.ts b/cli/tests/testdata/coverage/no_tests_included/foo.ts new file mode 100644 index 0000000000..fc2860ef0c --- /dev/null +++ b/cli/tests/testdata/coverage/no_tests_included/foo.ts @@ -0,0 +1,3 @@ +export function addNumbers(a: number, b: number): number { + return a + b; +} diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 0297782436..d3044a7163 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -6,6 +6,7 @@ use 
crate::args::Flags; use crate::colors; use crate::proc_state::ProcState; use crate::tools::fmt::format_json; +use crate::tools::test::is_supported_test_path; use crate::util::fs::FileCollector; use crate::util::text_encoding::source_map_from_code; @@ -27,6 +28,7 @@ use std::io::BufWriter; use std::io::Error; use std::io::Write; use std::io::{self}; +use std::path::Path; use std::path::PathBuf; use text_lines::TextLines; use uuid::Uuid; @@ -602,7 +604,8 @@ fn filter_coverages( || e.url.starts_with(npm_root_dir) || e.url.ends_with("__anonymous__") || e.url.ends_with("$deno$test.js") - || e.url.ends_with(".snap"); + || e.url.ends_with(".snap") + || is_supported_test_path(Path::new(e.url.as_str())); let is_included = include.iter().any(|p| p.is_match(&e.url)); let is_excluded = exclude.iter().any(|p| p.is_match(&e.url)); diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 977073ab73..8533073744 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -1518,7 +1518,7 @@ async fn test_specifiers( } /// Checks if the path has a basename and extension Deno supports for tests. 
-fn is_supported_test_path(path: &Path) -> bool { +pub(crate) fn is_supported_test_path(path: &Path) -> bool { if let Some(name) = path.file_stem() { let basename = name.to_string_lossy(); (basename.ends_with("_test") From 10442350c31c94f75855a93a5e78f4c6b5b8e382 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 19 Apr 2023 23:35:02 +0200 Subject: [PATCH 002/320] refactor(core): remove PhantomData from IdentityHasher (#18770) --- core/realm.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/realm.rs b/core/realm.rs index 8e2d932b5b..08a550294d 100644 --- a/core/realm.rs +++ b/core/realm.rs @@ -10,7 +10,6 @@ use std::collections::HashMap; use std::collections::HashSet; use std::hash::BuildHasherDefault; use std::hash::Hasher; -use std::marker::PhantomData; use std::option::Option; use std::rc::Rc; use v8::HandleScope; @@ -19,7 +18,7 @@ use v8::Local; // Hasher used for `unrefed_ops`. Since these are rolling i32, there's no // need to actually hash them. #[derive(Default)] -pub(crate) struct IdentityHasher(u64, PhantomData); +pub(crate) struct IdentityHasher(u64); impl Hasher for IdentityHasher { fn write_i32(&mut self, i: i32) { From c2e9c8cce5ce1a1c4e81f685a552dc4b6955f421 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 19 Apr 2023 17:50:56 -0400 Subject: [PATCH 003/320] fix(compile): write bytes directly to output file (#18777) 1. Adds cli/standalone folder 2. Writes the bytes directly to the output file. When adding npm packages this might get quite large, so let's not keep the final output in memory just in case. 
--- cli/main.rs | 6 +- cli/standalone/binary.rs | 307 +++++++++++++++++++++++ cli/{standalone.rs => standalone/mod.rs} | 94 +------ cli/tests/integration/compile_tests.rs | 41 +-- cli/tools/standalone.rs | 236 ++++------------- 5 files changed, 381 insertions(+), 303 deletions(-) create mode 100644 cli/standalone/binary.rs rename cli/{standalone.rs => standalone/mod.rs} (77%) diff --git a/cli/main.rs b/cli/main.rs index 5e088d8911..02ac5891cd 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -42,6 +42,7 @@ use deno_runtime::colors; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::run_local; use std::env; +use std::env::current_exe; use std::path::PathBuf; async fn run_subcommand(flags: Flags) -> Result { @@ -245,8 +246,11 @@ pub fn main() { let args: Vec = env::args().collect(); let future = async move { + let current_exe_path = current_exe()?; let standalone_res = - match standalone::extract_standalone(args.clone()).await { + match standalone::extract_standalone(¤t_exe_path, args.clone()) + .await + { Ok(Some((metadata, eszip))) => standalone::run(eszip, metadata).await, Ok(None) => Ok(()), Err(err) => Err(err), diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs new file mode 100644 index 0000000000..bca0aff2b4 --- /dev/null +++ b/cli/standalone/binary.rs @@ -0,0 +1,307 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use std::io::Read; +use std::io::Seek; +use std::io::SeekFrom; +use std::io::Write; +use std::path::Path; +use std::sync::Arc; + +use deno_ast::ModuleSpecifier; +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +use deno_core::futures::io::AllowStdIo; +use deno_core::futures::AsyncReadExt; +use deno_core::futures::AsyncSeekExt; +use deno_core::serde_json; +use deno_core::url::Url; +use deno_runtime::permissions::PermissionsOptions; +use log::Level; +use serde::Deserialize; +use serde::Serialize; + +use crate::args::CaData; +use crate::args::CliOptions; +use crate::args::CompileFlags; +use crate::cache::DenoDir; +use crate::file_fetcher::FileFetcher; +use crate::http_util::HttpClient; +use crate::util::progress_bar::ProgressBar; +use crate::util::progress_bar::ProgressBarStyle; + +const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; + +#[derive(Deserialize, Serialize)] +pub struct Metadata { + pub argv: Vec, + pub unstable: bool, + pub seed: Option, + pub permissions: PermissionsOptions, + pub location: Option, + pub v8_flags: Vec, + pub log_level: Option, + pub ca_stores: Option>, + pub ca_data: Option>, + pub unsafely_ignore_certificate_errors: Option>, + pub maybe_import_map: Option<(Url, String)>, + pub entrypoint: ModuleSpecifier, +} + +pub fn write_binary_bytes( + writer: &mut impl Write, + original_bin: Vec, + metadata: &Metadata, + eszip: eszip::EszipV2, +) -> Result<(), AnyError> { + let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); + let eszip_archive = eszip.into_bytes(); + + let eszip_pos = original_bin.len(); + let metadata_pos = eszip_pos + eszip_archive.len(); + let mut trailer = MAGIC_TRAILER.to_vec(); + trailer.write_all(&eszip_pos.to_be_bytes())?; + trailer.write_all(&metadata_pos.to_be_bytes())?; + + writer.write_all(&original_bin)?; + writer.write_all(&eszip_archive)?; + writer.write_all(&metadata)?; + writer.write_all(&trailer)?; + + Ok(()) +} + +pub fn is_standalone_binary(exe_path: &Path) -> bool { + let Ok(mut 
output_file) = std::fs::File::open(exe_path) else { + return false; + }; + if output_file.seek(SeekFrom::End(-24)).is_err() { + // This seek may fail because the file is too small to possibly be + // `deno compile` output. + return false; + } + let mut trailer = [0; 24]; + if output_file.read_exact(&mut trailer).is_err() { + return false; + }; + let (magic_trailer, _) = trailer.split_at(8); + magic_trailer == MAGIC_TRAILER +} + +/// This function will try to run this binary as a standalone binary +/// produced by `deno compile`. It determines if this is a standalone +/// binary by checking for the magic trailer string `d3n0l4nd` at EOF-24 (8 bytes * 3). +/// The magic trailer is followed by: +/// - a u64 pointer to the JS bundle embedded in the binary +/// - a u64 pointer to JSON metadata (serialized flags) embedded in the binary +/// These are dereferenced, and the bundle is executed under the configuration +/// specified by the metadata. If no magic trailer is present, this function +/// exits with `Ok(None)`. 
+pub async fn extract_standalone( + exe_path: &Path, + cli_args: Vec, +) -> Result, AnyError> { + let file = std::fs::File::open(exe_path)?; + + let mut bufreader = + deno_core::futures::io::BufReader::new(AllowStdIo::new(file)); + + let trailer_pos = bufreader.seek(SeekFrom::End(-24)).await?; + let mut trailer = [0; 24]; + bufreader.read_exact(&mut trailer).await?; + let (magic_trailer, rest) = trailer.split_at(8); + if magic_trailer != MAGIC_TRAILER { + return Ok(None); + } + + let (eszip_archive_pos, rest) = rest.split_at(8); + let metadata_pos = rest; + let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; + let metadata_pos = u64_from_bytes(metadata_pos)?; + let metadata_len = trailer_pos - metadata_pos; + + bufreader.seek(SeekFrom::Start(eszip_archive_pos)).await?; + + let (eszip, loader) = eszip::EszipV2::parse(bufreader) + .await + .context("Failed to parse eszip header")?; + + let mut bufreader = loader.await.context("Failed to parse eszip archive")?; + + bufreader.seek(SeekFrom::Start(metadata_pos)).await?; + + let mut metadata = String::new(); + + bufreader + .take(metadata_len) + .read_to_string(&mut metadata) + .await + .context("Failed to read metadata from the current executable")?; + + let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap(); + metadata.argv.append(&mut cli_args[1..].to_vec()); + + Ok(Some((metadata, eszip))) +} + +fn u64_from_bytes(arr: &[u8]) -> Result { + let fixed_arr: &[u8; 8] = arr + .try_into() + .context("Failed to convert the buffer into a fixed-size array")?; + Ok(u64::from_be_bytes(*fixed_arr)) +} + +pub struct DenoCompileBinaryWriter { + file_fetcher: Arc, + client: HttpClient, + deno_dir: DenoDir, +} + +impl DenoCompileBinaryWriter { + pub fn new( + file_fetcher: Arc, + client: HttpClient, + deno_dir: DenoDir, + ) -> Self { + Self { + file_fetcher, + client, + deno_dir, + } + } + + pub async fn write_bin( + &self, + writer: &mut impl Write, + eszip: eszip::EszipV2, + module_specifier: 
&ModuleSpecifier, + compile_flags: &CompileFlags, + cli_options: &CliOptions, + ) -> Result<(), AnyError> { + // Select base binary based on target + let original_binary = + self.get_base_binary(compile_flags.target.clone()).await?; + + self + .write_standalone_binary( + writer, + original_binary, + eszip, + module_specifier, + cli_options, + compile_flags, + ) + .await + } + + async fn get_base_binary( + &self, + target: Option, + ) -> Result, AnyError> { + if target.is_none() { + let path = std::env::current_exe()?; + return Ok(std::fs::read(path)?); + } + + let target = target.unwrap_or_else(|| env!("TARGET").to_string()); + let binary_name = format!("deno-{target}.zip"); + + let binary_path_suffix = if crate::version::is_canary() { + format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name) + } else { + format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name) + }; + + let download_directory = self.deno_dir.dl_folder_path(); + let binary_path = download_directory.join(&binary_path_suffix); + + if !binary_path.exists() { + self + .download_base_binary(&download_directory, &binary_path_suffix) + .await?; + } + + let archive_data = std::fs::read(binary_path)?; + let temp_dir = tempfile::TempDir::new()?; + let base_binary_path = crate::tools::upgrade::unpack_into_dir( + archive_data, + target.contains("windows"), + &temp_dir, + )?; + let base_binary = std::fs::read(base_binary_path)?; + drop(temp_dir); // delete the temp dir + Ok(base_binary) + } + + async fn download_base_binary( + &self, + output_directory: &Path, + binary_path_suffix: &str, + ) -> Result<(), AnyError> { + let download_url = format!("https://dl.deno.land/{binary_path_suffix}"); + let maybe_bytes = { + let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars); + let progress = progress_bars.update(&download_url); + + self + .client + .download_with_progress(download_url, &progress) + .await? 
+ }; + let bytes = match maybe_bytes { + Some(bytes) => bytes, + None => { + log::info!("Download could not be found, aborting"); + std::process::exit(1) + } + }; + + std::fs::create_dir_all(output_directory)?; + let output_path = output_directory.join(binary_path_suffix); + std::fs::create_dir_all(output_path.parent().unwrap())?; + tokio::fs::write(output_path, bytes).await?; + Ok(()) + } + + /// This functions creates a standalone deno binary by appending a bundle + /// and magic trailer to the currently executing binary. + async fn write_standalone_binary( + &self, + writer: &mut impl Write, + original_bin: Vec, + eszip: eszip::EszipV2, + entrypoint: &ModuleSpecifier, + cli_options: &CliOptions, + compile_flags: &CompileFlags, + ) -> Result<(), AnyError> { + let ca_data = match cli_options.ca_data() { + Some(CaData::File(ca_file)) => Some( + std::fs::read(ca_file) + .with_context(|| format!("Reading: {ca_file}"))?, + ), + Some(CaData::Bytes(bytes)) => Some(bytes.clone()), + None => None, + }; + let maybe_import_map = cli_options + .resolve_import_map(&self.file_fetcher) + .await? 
+ .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); + let metadata = Metadata { + argv: compile_flags.args.clone(), + unstable: cli_options.unstable(), + seed: cli_options.seed(), + location: cli_options.location_flag().clone(), + permissions: cli_options.permissions_options(), + v8_flags: cli_options.v8_flags().clone(), + unsafely_ignore_certificate_errors: cli_options + .unsafely_ignore_certificate_errors() + .clone(), + log_level: cli_options.log_level(), + ca_stores: cli_options.ca_stores().clone(), + ca_data, + entrypoint: entrypoint.clone(), + maybe_import_map, + }; + + write_binary_bytes(writer, original_bin, &metadata, eszip) + } +} diff --git a/cli/standalone.rs b/cli/standalone/mod.rs similarity index 77% rename from cli/standalone.rs rename to cli/standalone/mod.rs index 48d71a045c..a2872e9b92 100644 --- a/cli/standalone.rs +++ b/cli/standalone/mod.rs @@ -12,16 +12,9 @@ use crate::CliGraphResolver; use deno_core::anyhow::Context; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::io::AllowStdIo; use deno_core::futures::task::LocalFutureObj; -use deno_core::futures::AsyncReadExt; -use deno_core::futures::AsyncSeekExt; use deno_core::futures::FutureExt; use deno_core::located_script_name; -use deno_core::serde::Deserialize; -use deno_core::serde::Serialize; -use deno_core::serde_json; -use deno_core::url::Url; use deno_core::v8_set_flags; use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; @@ -33,7 +26,6 @@ use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::permissions::PermissionsOptions; use deno_runtime::web_worker::WebWorker; use deno_runtime::web_worker::WebWorkerOptions; use deno_runtime::worker::MainWorker; @@ -41,93 +33,17 @@ use deno_runtime::worker::WorkerOptions; use deno_runtime::BootstrapOptions; use 
import_map::parse_from_json; use log::Level; -use std::env::current_exe; -use std::io::SeekFrom; use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; -#[derive(Deserialize, Serialize)] -pub struct Metadata { - pub argv: Vec, - pub unstable: bool, - pub seed: Option, - pub permissions: PermissionsOptions, - pub location: Option, - pub v8_flags: Vec, - pub log_level: Option, - pub ca_stores: Option>, - pub ca_data: Option>, - pub unsafely_ignore_certificate_errors: Option>, - pub maybe_import_map: Option<(Url, String)>, - pub entrypoint: ModuleSpecifier, -} +mod binary; -pub const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; +pub use binary::extract_standalone; +pub use binary::is_standalone_binary; +pub use binary::DenoCompileBinaryWriter; -/// This function will try to run this binary as a standalone binary -/// produced by `deno compile`. It determines if this is a standalone -/// binary by checking for the magic trailer string `d3n0l4nd` at EOF-24. -/// The magic trailer is followed by: -/// - a u64 pointer to the JS bundle embedded in the binary -/// - a u64 pointer to JSON metadata (serialized flags) embedded in the binary -/// These are dereferenced, and the bundle is executed under the configuration -/// specified by the metadata. If no magic trailer is present, this function -/// exits with `Ok(None)`. 
-pub async fn extract_standalone( - args: Vec, -) -> Result, AnyError> { - let current_exe_path = current_exe()?; - - let file = std::fs::File::open(current_exe_path)?; - - let mut bufreader = - deno_core::futures::io::BufReader::new(AllowStdIo::new(file)); - - let trailer_pos = bufreader.seek(SeekFrom::End(-24)).await?; - let mut trailer = [0; 24]; - bufreader.read_exact(&mut trailer).await?; - let (magic_trailer, rest) = trailer.split_at(8); - if magic_trailer != MAGIC_TRAILER { - return Ok(None); - } - - let (eszip_archive_pos, rest) = rest.split_at(8); - let metadata_pos = rest; - let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; - let metadata_pos = u64_from_bytes(metadata_pos)?; - let metadata_len = trailer_pos - metadata_pos; - - bufreader.seek(SeekFrom::Start(eszip_archive_pos)).await?; - - let (eszip, loader) = eszip::EszipV2::parse(bufreader) - .await - .context("Failed to parse eszip header")?; - - let mut bufreader = loader.await.context("Failed to parse eszip archive")?; - - bufreader.seek(SeekFrom::Start(metadata_pos)).await?; - - let mut metadata = String::new(); - - bufreader - .take(metadata_len) - .read_to_string(&mut metadata) - .await - .context("Failed to read metadata from the current executable")?; - - let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap(); - metadata.argv.append(&mut args[1..].to_vec()); - - Ok(Some((metadata, eszip))) -} - -fn u64_from_bytes(arr: &[u8]) -> Result { - let fixed_arr: &[u8; 8] = arr - .try_into() - .context("Failed to convert the buffer into a fixed-size array")?; - Ok(u64::from_be_bytes(*fixed_arr)) -} +use self::binary::Metadata; #[derive(Clone)] struct EmbeddedModuleLoader { diff --git a/cli/tests/integration/compile_tests.rs b/cli/tests/integration/compile_tests.rs index 957beed30a..7835d7f0d8 100644 --- a/cli/tests/integration/compile_tests.rs +++ b/cli/tests/integration/compile_tests.rs @@ -4,6 +4,7 @@ use std::fs::File; use std::process::Command; use test_util as util; use 
test_util::TempDir; +use util::assert_contains; #[test] fn compile() { @@ -111,13 +112,13 @@ fn standalone_error() { let stderr = util::strip_ansi_codes(&stderr).to_string(); // On Windows, we cannot assert the file path (because '\'). // Instead we just check for relevant output. - assert!(stderr.contains("error: Uncaught Error: boom!")); - assert!(stderr.contains("throw new Error(\"boom!\");")); - assert!(stderr.contains("\n at boom (file://")); - assert!(stderr.contains("standalone_error.ts:2:11")); - assert!(stderr.contains("at foo (file://")); - assert!(stderr.contains("standalone_error.ts:5:5")); - assert!(stderr.contains("standalone_error.ts:7:1")); + assert_contains!(stderr, "error: Uncaught Error: boom!"); + assert_contains!(stderr, "throw new Error(\"boom!\");"); + assert_contains!(stderr, "\n at boom (file://"); + assert_contains!(stderr, "standalone_error.ts:2:11"); + assert_contains!(stderr, "at foo (file://"); + assert_contains!(stderr, "standalone_error.ts:5:5"); + assert_contains!(stderr, "standalone_error.ts:7:1"); } #[test] @@ -156,10 +157,10 @@ fn standalone_error_module_with_imports() { let stderr = util::strip_ansi_codes(&stderr).to_string(); // On Windows, we cannot assert the file path (because '\'). // Instead we just check for relevant output. 
- assert!(stderr.contains("error: Uncaught Error: boom!")); - assert!(stderr.contains("throw new Error(\"boom!\");")); - assert!(stderr.contains("\n at file://")); - assert!(stderr.contains("standalone_error_module_with_imports_2.ts:2:7")); + assert_contains!(stderr, "error: Uncaught Error: boom!"); + assert_contains!(stderr, "throw new Error(\"boom!\");"); + assert_contains!(stderr, "\n at file://"); + assert_contains!(stderr, "standalone_error_module_with_imports_2.ts:2:7"); } #[test] @@ -259,7 +260,7 @@ fn compile_with_file_exists_error() { file_path.display(), ); let stderr = String::from_utf8(output.stderr).unwrap(); - assert!(stderr.contains(&expected_stderr)); + assert_contains!(stderr, &expected_stderr); } #[test] @@ -293,7 +294,7 @@ fn compile_with_directory_exists_error() { exe.display() ); let stderr = String::from_utf8(output.stderr).unwrap(); - assert!(stderr.contains(&expected_stderr)); + assert_contains!(stderr, &expected_stderr); } #[test] @@ -327,8 +328,7 @@ fn compile_with_conflict_file_exists_error() { exe.display() ); let stderr = String::from_utf8(output.stderr).unwrap(); - dbg!(&stderr); - assert!(stderr.contains(&expected_stderr)); + assert_contains!(stderr, &expected_stderr); assert!(std::fs::read(&exe) .unwrap() .eq(b"SHOULD NOT BE OVERWRITTEN")); @@ -407,8 +407,10 @@ fn standalone_runtime_flags() { let stdout_str = String::from_utf8(output.stdout).unwrap(); assert_eq!(util::strip_ansi_codes(&stdout_str), "0.147205063401058\n"); let stderr_str = String::from_utf8(output.stderr).unwrap(); - assert!(util::strip_ansi_codes(&stderr_str) - .contains("PermissionDenied: Requires write access")); + assert_contains!( + util::strip_ansi_codes(&stderr_str), + "PermissionDenied: Requires write access" + ); } #[test] @@ -636,9 +638,10 @@ fn check_local_by_default2() { let stdout = String::from_utf8(output.stdout).unwrap(); let stderr = String::from_utf8(output.stderr).unwrap(); assert!(stdout.is_empty()); - assert!(stderr.contains( + assert_contains!( + 
stderr, r#"error: TS2322 [ERROR]: Type '12' is not assignable to type '"b"'."# - )); + ); } #[test] diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index fab3266ea4..94b1c01703 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -1,32 +1,18 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use crate::args::CaData; use crate::args::CompileFlags; use crate::args::Flags; -use crate::cache::DenoDir; use crate::graph_util::error_for_any_npm_specifier; -use crate::http_util::HttpClient; -use crate::standalone::Metadata; -use crate::standalone::MAGIC_TRAILER; +use crate::standalone::is_standalone_binary; +use crate::standalone::DenoCompileBinaryWriter; use crate::util::path::path_has_trailing_slash; -use crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; use crate::ProcState; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; -use deno_core::serde_json; -use deno_graph::ModuleSpecifier; use deno_runtime::colors; -use std::env; -use std::fs; -use std::fs::File; -use std::io::Read; -use std::io::Seek; -use std::io::SeekFrom; -use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -38,6 +24,11 @@ pub async fn compile( compile_flags: CompileFlags, ) -> Result<(), AnyError> { let ps = ProcState::from_flags(flags).await?; + let binary_writer = DenoCompileBinaryWriter::new( + ps.file_fetcher.clone(), + ps.http_client.clone(), + ps.dir.clone(), + ); let module_specifier = ps.options.resolve_main_module()?; let module_roots = { let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); @@ -47,7 +38,6 @@ pub async fn compile( } vec }; - let deno_dir = &ps.dir; let output_path = resolve_compile_executable_output_path( &compile_flags, @@ -69,164 +59,40 @@ pub async fn compile( let eszip = eszip::EszipV2::from_graph(graph, &parser, 
Default::default())?; log::info!( - "{} {}", + "{} {} to {}", colors::green("Compile"), - module_specifier.to_string() + module_specifier.to_string(), + output_path.display(), ); + validate_output_path(&output_path)?; - // Select base binary based on target - let original_binary = - get_base_binary(&ps.http_client, deno_dir, compile_flags.target.clone()) - .await?; + let mut file = std::fs::File::create(&output_path)?; + binary_writer + .write_bin( + &mut file, + eszip, + &module_specifier, + &compile_flags, + &ps.options, + ) + .await + .with_context(|| format!("Writing {}", output_path.display()))?; + drop(file); - let final_bin = create_standalone_binary( - original_binary, - eszip, - module_specifier, - &compile_flags, - ps, - ) - .await?; - - log::info!("{} {}", colors::green("Emit"), output_path.display()); - - write_standalone_binary(output_path, final_bin).await?; - Ok(()) -} - -async fn get_base_binary( - client: &HttpClient, - deno_dir: &DenoDir, - target: Option, -) -> Result, AnyError> { - if target.is_none() { - let path = std::env::current_exe()?; - return Ok(tokio::fs::read(path).await?); + // set it as executable + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let perms = std::fs::Permissions::from_mode(0o777); + std::fs::set_permissions(output_path, perms)?; } - let target = target.unwrap_or_else(|| env!("TARGET").to_string()); - let binary_name = format!("deno-{target}.zip"); - - let binary_path_suffix = if crate::version::is_canary() { - format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name) - } else { - format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name) - }; - - let download_directory = deno_dir.dl_folder_path(); - let binary_path = download_directory.join(&binary_path_suffix); - - if !binary_path.exists() { - download_base_binary(client, &download_directory, &binary_path_suffix) - .await?; - } - - let archive_data = tokio::fs::read(binary_path).await?; - let temp_dir = tempfile::TempDir::new()?; - let 
base_binary_path = crate::tools::upgrade::unpack_into_dir( - archive_data, - target.contains("windows"), - &temp_dir, - )?; - let base_binary = tokio::fs::read(base_binary_path).await?; - drop(temp_dir); // delete the temp dir - Ok(base_binary) -} - -async fn download_base_binary( - client: &HttpClient, - output_directory: &Path, - binary_path_suffix: &str, -) -> Result<(), AnyError> { - let download_url = format!("https://dl.deno.land/{binary_path_suffix}"); - let maybe_bytes = { - let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars); - let progress = progress_bars.update(&download_url); - - client - .download_with_progress(download_url, &progress) - .await? - }; - let bytes = match maybe_bytes { - Some(bytes) => bytes, - None => { - log::info!("Download could not be found, aborting"); - std::process::exit(1) - } - }; - - std::fs::create_dir_all(output_directory)?; - let output_path = output_directory.join(binary_path_suffix); - std::fs::create_dir_all(output_path.parent().unwrap())?; - tokio::fs::write(output_path, bytes).await?; Ok(()) } -/// This functions creates a standalone deno binary by appending a bundle -/// and magic trailer to the currently executing binary. -async fn create_standalone_binary( - mut original_bin: Vec, - eszip: eszip::EszipV2, - entrypoint: ModuleSpecifier, - compile_flags: &CompileFlags, - ps: ProcState, -) -> Result, AnyError> { - let mut eszip_archive = eszip.into_bytes(); - - let ca_data = match ps.options.ca_data() { - Some(CaData::File(ca_file)) => { - Some(fs::read(ca_file).with_context(|| format!("Reading: {ca_file}"))?) - } - Some(CaData::Bytes(bytes)) => Some(bytes.clone()), - None => None, - }; - let maybe_import_map = ps - .options - .resolve_import_map(&ps.file_fetcher) - .await? 
- .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); - let metadata = Metadata { - argv: compile_flags.args.clone(), - unstable: ps.options.unstable(), - seed: ps.options.seed(), - location: ps.options.location_flag().clone(), - permissions: ps.options.permissions_options(), - v8_flags: ps.options.v8_flags().clone(), - unsafely_ignore_certificate_errors: ps - .options - .unsafely_ignore_certificate_errors() - .clone(), - log_level: ps.options.log_level(), - ca_stores: ps.options.ca_stores().clone(), - ca_data, - entrypoint, - maybe_import_map, - }; - let mut metadata = serde_json::to_string(&metadata)?.as_bytes().to_vec(); - - let eszip_pos = original_bin.len(); - let metadata_pos = eszip_pos + eszip_archive.len(); - let mut trailer = MAGIC_TRAILER.to_vec(); - trailer.write_all(&eszip_pos.to_be_bytes())?; - trailer.write_all(&metadata_pos.to_be_bytes())?; - - let mut final_bin = Vec::with_capacity( - original_bin.len() + eszip_archive.len() + trailer.len(), - ); - final_bin.append(&mut original_bin); - final_bin.append(&mut eszip_archive); - final_bin.append(&mut metadata); - final_bin.append(&mut trailer); - - Ok(final_bin) -} - /// This function writes out a final binary to specified path. If output path /// is not already standalone binary it will return error instead. -async fn write_standalone_binary( - output_path: PathBuf, - final_bin: Vec, -) -> Result<(), AnyError> { +fn validate_output_path(output_path: &Path) -> Result<(), AnyError> { if output_path.exists() { // If the output is a directory, throw error if output_path.is_dir() { @@ -240,19 +106,9 @@ async fn write_standalone_binary( ); } - // Make sure we don't overwrite any file not created by Deno compiler. - // Check for magic trailer in last 24 bytes. - let mut has_trailer = false; - let mut output_file = File::open(&output_path)?; - // This seek may fail because the file is too small to possibly be - // `deno compile` output. 
- if output_file.seek(SeekFrom::End(-24)).is_ok() { - let mut trailer = [0; 24]; - output_file.read_exact(&mut trailer)?; - let (magic_trailer, _) = trailer.split_at(8); - has_trailer = magic_trailer == MAGIC_TRAILER; - } - if !has_trailer { + // Make sure we don't overwrite any file not created by Deno compiler because + // this filename is chosen automatically in some cases. + if !is_standalone_binary(output_path) { bail!( concat!( "Could not compile to file '{}' because the file already exists ", @@ -265,28 +121,20 @@ async fn write_standalone_binary( // Remove file if it was indeed a deno compiled binary, to avoid corruption // (see https://github.com/denoland/deno/issues/10310) - std::fs::remove_file(&output_path)?; + std::fs::remove_file(output_path)?; } else { let output_base = &output_path.parent().unwrap(); if output_base.exists() && output_base.is_file() { bail!( - concat!( - "Could not compile to file '{}' because its parent directory ", - "is an existing file. You can use the `--output ` flag to ", - "provide an alternative name.", - ), - output_base.display(), - ); + concat!( + "Could not compile to file '{}' because its parent directory ", + "is an existing file. You can use the `--output ` flag to ", + "provide an alternative name.", + ), + output_base.display(), + ); } - tokio::fs::create_dir_all(output_base).await?; - } - - tokio::fs::write(&output_path, final_bin).await?; - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let perms = std::fs::Permissions::from_mode(0o777); - tokio::fs::set_permissions(output_path, perms).await?; + std::fs::create_dir_all(output_base)?; } Ok(()) From 02da57e2759a7521d53e28f7fb3fc0a268406b82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 20 Apr 2023 00:58:41 +0200 Subject: [PATCH 004/320] refactor(ext/webidl): remove option bags from "makeException" (#18679) Creating these options bags is more costly than passing arguments one-by-one. 
Especially since `prefix` and `context` are passed to all functions. --- ext/fetch/20_headers.js | 3 +- ext/fetch/23_request.js | 3 +- ext/web/06_streams.js | 96 +++++++++------------------- ext/webidl/00_webidl.js | 132 ++++++++++++++++++++++++++++----------- ext/webidl/internal.d.ts | 10 +-- 5 files changed, 135 insertions(+), 109 deletions(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index a96d0da3b8..29b4540f94 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -431,7 +431,8 @@ webidl.converters["HeadersInit"] = (V, opts) => { throw webidl.makeException( TypeError, "The provided value is not of type '(sequence> or record)'", - opts, + opts.prefix, + opts.context, ); }; webidl.converters["Headers"] = webidl.createInterfaceConverter( diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 798346ae62..ae3edffd47 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -341,7 +341,8 @@ class Request { throw webidl.makeException( TypeError, "`client` must be a Deno.HttpClient", - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); } request.clientRid = init.client?.rid ?? 
null; diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index c516063658..ac626a209d 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -508,7 +508,7 @@ function extractSizeAlgorithm(strategy) { [chunk], undefined, webidl.converters["unrestricted double"], - { prefix: "Failed to call `sizeAlgorithm`" }, + "Failed to call `sizeAlgorithm`", ); } @@ -3315,10 +3315,7 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'ReadableByteStreamController'", - }, + "Failed to call 'startAlgorithm' on 'ReadableByteStreamController'", ); } if (underlyingSourceDict.pull !== undefined) { @@ -3328,11 +3325,8 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'pullAlgorithm' on 'ReadableByteStreamController'", - returnsPromise: true, - }, + "Failed to call 'pullAlgorithm' on 'ReadableByteStreamController'", + true, ); } if (underlyingSourceDict.cancel !== undefined) { @@ -3342,11 +3336,8 @@ function setUpReadableByteStreamControllerFromUnderlyingSource( [reason], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'cancelAlgorithm' on 'ReadableByteStreamController'", - returnsPromise: true, - }, + "Failed to call 'cancelAlgorithm' on 'ReadableByteStreamController'", + true, ); } const autoAllocateChunkSize = underlyingSourceDict["autoAllocateChunkSize"]; @@ -3437,10 +3428,7 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( [controller], underlyingSource, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'ReadableStreamDefaultController'", - }, + "Failed to call 'startAlgorithm' on 'ReadableStreamDefaultController'", ); } if (underlyingSourceDict.pull !== undefined) { @@ -3450,11 +3438,8 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( 
[controller], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'pullAlgorithm' on 'ReadableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'pullAlgorithm' on 'ReadableStreamDefaultController'", + true, ); } if (underlyingSourceDict.cancel !== undefined) { @@ -3464,11 +3449,8 @@ function setUpReadableStreamDefaultControllerFromUnderlyingSource( [reason], underlyingSource, webidl.converters["Promise"], - { - prefix: - "Failed to call 'cancelAlgorithm' on 'ReadableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'cancelAlgorithm' on 'ReadableStreamDefaultController'", + true, ); } setUpReadableStreamDefaultController( @@ -3569,11 +3551,8 @@ function setUpTransformStreamDefaultControllerFromTransformer( [chunk, controller], transformer, webidl.converters["Promise"], - { - prefix: - "Failed to call 'transformAlgorithm' on 'TransformStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'transformAlgorithm' on 'TransformStreamDefaultController'", + true, ); } if (transformerDict.flush !== undefined) { @@ -3583,11 +3562,8 @@ function setUpTransformStreamDefaultControllerFromTransformer( [controller], transformer, webidl.converters["Promise"], - { - prefix: - "Failed to call 'flushAlgorithm' on 'TransformStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'flushAlgorithm' on 'TransformStreamDefaultController'", + true, ); } setUpTransformStreamDefaultController( @@ -3679,10 +3655,7 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [controller], underlyingSink, webidl.converters.any, - { - prefix: - "Failed to call 'startAlgorithm' on 'WritableStreamDefaultController'", - }, + "Failed to call 'startAlgorithm' on 'WritableStreamDefaultController'", ); } if (underlyingSinkDict.write !== undefined) { @@ -3692,11 +3665,8 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [chunk, controller], underlyingSink, 
webidl.converters["Promise"], - { - prefix: - "Failed to call 'writeAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'writeAlgorithm' on 'WritableStreamDefaultController'", + true, ); } if (underlyingSinkDict.close !== undefined) { @@ -3706,11 +3676,8 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [], underlyingSink, webidl.converters["Promise"], - { - prefix: - "Failed to call 'closeAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'closeAlgorithm' on 'WritableStreamDefaultController'", + true, ); } if (underlyingSinkDict.abort !== undefined) { @@ -3720,11 +3687,8 @@ function setUpWritableStreamDefaultControllerFromUnderlyingSink( [reason], underlyingSink, webidl.converters["Promise"], - { - prefix: - "Failed to call 'abortAlgorithm' on 'WritableStreamDefaultController'", - returnsPromise: true, - }, + "Failed to call 'abortAlgorithm' on 'WritableStreamDefaultController'", + true, ); } setUpWritableStreamDefaultController( @@ -5467,16 +5431,19 @@ class ReadableByteStreamController { ); } if (byteLength === 0) { - throw webidl.makeException(TypeError, "length must be non-zero", { + throw webidl.makeException( + TypeError, + "length must be non-zero", prefix, - context: arg1, - }); + arg1, + ); } if (getArrayBufferByteLength(buffer) === 0) { throw webidl.makeException( TypeError, "buffer length must be non-zero", - { prefix, context: arg1 }, + prefix, + arg1, ); } if (this[_closeRequested] === true) { @@ -5790,10 +5757,7 @@ class TransformStream { [this[_controller]], transformer, webidl.converters.any, - { - prefix: - "Failed to call 'start' on 'TransformStreamDefaultController'", - }, + "Failed to call 'start' on 'TransformStreamDefaultController'", ), ); } else { diff --git a/ext/webidl/00_webidl.js b/ext/webidl/00_webidl.js index 4398609e52..7788741048 100644 --- a/ext/webidl/00_webidl.js +++ b/ext/webidl/00_webidl.js @@ -86,11 +86,9 @@ const { 
Uint8ClampedArray, } = primordials; -function makeException(ErrorType, message, opts = {}) { +function makeException(ErrorType, message, prefix, context) { return new ErrorType( - `${opts.prefix ? opts.prefix + ": " : ""}${ - opts.context ? opts.context : "Value" - } ${message}`, + `${prefix ? prefix + ": " : ""}${context ? context : "Value"} ${message}`, ); } @@ -199,7 +197,12 @@ function createIntegerConversion(bitLength, typeOpts) { if (opts.enforceRange) { if (!NumberIsFinite(x)) { - throw makeException(TypeError, "is not a finite number", opts); + throw makeException( + TypeError, + "is not a finite number", + opts.prefix, + opts.context, + ); } x = integerPart(x); @@ -208,7 +211,8 @@ function createIntegerConversion(bitLength, typeOpts) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts, + opts.prefix, + opts.context, ); } @@ -252,7 +256,12 @@ function createLongLongConversion(bitLength, { unsigned }) { if (opts.enforceRange) { if (!NumberIsFinite(x)) { - throw makeException(TypeError, "is not a finite number", opts); + throw makeException( + TypeError, + "is not a finite number", + opts.prefix, + opts.context, + ); } x = integerPart(x); @@ -261,7 +270,8 @@ function createLongLongConversion(bitLength, { unsigned }) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts, + opts.prefix, + opts.context, ); } @@ -317,7 +327,8 @@ converters.float = (V, opts) => { throw makeException( TypeError, "is not a finite floating-point value", - opts, + opts.prefix, + opts.context, ); } @@ -331,7 +342,8 @@ converters.float = (V, opts) => { throw makeException( TypeError, "is outside the range of a single-precision floating-point value", - opts, + opts.prefix, + opts.context, ); } @@ -359,7 +371,8 @@ converters.double = (V, opts) => { throw makeException( TypeError, "is not a finite floating-point value", - opts, + opts.prefix, + opts.context, 
); } @@ -381,7 +394,8 @@ converters.DOMString = function (V, opts = {}) { throw makeException( TypeError, "is a symbol, which cannot be converted to a string", - opts, + opts.prefix, + opts.context, ); } @@ -393,7 +407,12 @@ const IS_BYTE_STRING = new SafeRegExp(/^[\x00-\xFF]*$/); converters.ByteString = (V, opts) => { const x = converters.DOMString(V, opts); if (!RegExpPrototypeTest(IS_BYTE_STRING, x)) { - throw makeException(TypeError, "is not a valid ByteString", opts); + throw makeException( + TypeError, + "is not a valid ByteString", + opts.prefix, + opts.context, + ); } return x; }; @@ -427,7 +446,12 @@ converters.USVString = (V, opts) => { converters.object = (V, opts) => { if (type(V) !== "Object") { - throw makeException(TypeError, "is not an object", opts); + throw makeException( + TypeError, + "is not an object", + opts.prefix, + opts.context, + ); } return V; @@ -439,7 +463,12 @@ converters.object = (V, opts) => { // handling for that is omitted. function convertCallbackFunction(V, opts) { if (typeof V !== "function") { - throw makeException(TypeError, "is not a function", opts); + throw makeException( + TypeError, + "is not a function", + opts.prefix, + opts.context, + ); } return V; } @@ -464,10 +493,16 @@ converters.ArrayBuffer = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer or SharedArrayBuffer", - opts, + opts.prefix, + opts.context, ); } - throw makeException(TypeError, "is not an ArrayBuffer", opts); + throw makeException( + TypeError, + "is not an ArrayBuffer", + opts.prefix, + opts.context, + ); } return V; @@ -475,14 +510,20 @@ converters.ArrayBuffer = (V, opts = {}) => { converters.DataView = (V, opts = {}) => { if (!isDataView(V)) { - throw makeException(TypeError, "is not a DataView", opts); + throw makeException( + TypeError, + "is not a DataView", + opts.prefix, + opts.context, + ); } if (!opts.allowShared && isSharedArrayBuffer(DataViewPrototypeGetBuffer(V))) { throw makeException( TypeError, "is backed by a 
SharedArrayBuffer, which is not allowed", - opts, + opts.prefix, + opts.context, ); } @@ -511,7 +552,8 @@ ArrayPrototypeForEach( throw makeException( TypeError, `is not ${article} ${name} object`, - opts, + opts.prefix, + opts.context, ); } if ( @@ -521,7 +563,8 @@ ArrayPrototypeForEach( throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + opts.prefix, + opts.context, ); } @@ -537,7 +580,8 @@ converters.ArrayBufferView = (V, opts = {}) => { throw makeException( TypeError, "is not a view on an ArrayBuffer or SharedArrayBuffer", - opts, + opts.prefix, + opts.context, ); } let buffer; @@ -550,7 +594,8 @@ converters.ArrayBufferView = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + opts.prefix, + opts.context, ); } @@ -569,7 +614,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts, + opts.prefix, + opts.context, ); } @@ -580,7 +626,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer or a view on one", - opts, + opts.prefix, + opts.context, ); } if ( @@ -591,7 +638,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer, SharedArrayBuffer, or a view on one", - opts, + opts.prefix, + opts.context, ); } @@ -707,7 +755,8 @@ function createDictionaryConverter(name, ...dictionaries) { throw makeException( TypeError, "can not be converted to a dictionary", - opts, + opts.prefix, + opts.context, ); } const esDict = V; @@ -741,7 +790,8 @@ function createDictionaryConverter(name, ...dictionaries) { throw makeException( TypeError, `can not be converted to '${name}' because '${key}' is required in '${name}'.`, - opts, + opts.prefix, + opts.context, ); } } @@ -789,7 +839,8 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be 
converted to sequence.", - opts, + opts.prefix, + opts.context, ); } const iter = V?.[SymbolIterator]?.(); @@ -797,7 +848,8 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts, + opts.prefix, + opts.context, ); } const array = []; @@ -807,7 +859,8 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts, + opts.prefix, + opts.context, ); } if (res.done === true) break; @@ -827,7 +880,8 @@ function createRecordConverter(keyConverter, valueConverter) { throw makeException( TypeError, "can not be converted to dictionary.", - opts, + opts.prefix, + opts.context, ); } const result = {}; @@ -870,16 +924,17 @@ function invokeCallbackFunction( args, thisArg, returnValueConverter, - opts, + prefix, + returnsPromise, ) { try { const rv = ReflectApply(callable, thisArg, args); return returnValueConverter(rv, { - prefix: opts.prefix, + prefix, context: "return value", }); } catch (err) { - if (opts.returnsPromise === true) { + if (returnsPromise === true) { return PromiseReject(err); } throw err; @@ -891,7 +946,12 @@ const brand = Symbol("[[webidl.brand]]"); function createInterfaceConverter(name, prototype) { return (V, opts) => { if (!ObjectPrototypeIsPrototypeOf(prototype, V) || V[brand] !== brand) { - throw makeException(TypeError, `is not of type ${name}.`, opts); + throw makeException( + TypeError, + `is not of type ${name}.`, + opts.prefix, + opts.context, + ); } return V; }; diff --git a/ext/webidl/internal.d.ts b/ext/webidl/internal.d.ts index 110031ae73..095e5ab91d 100644 --- a/ext/webidl/internal.d.ts +++ b/ext/webidl/internal.d.ts @@ -5,7 +5,7 @@ /// declare module "ext:deno_webidl/00_webidl.js" { - interface ConverterOpts { + interface ValueConverterOpts { /** * The prefix for error messages created by this converter. 
* Examples: @@ -13,8 +13,6 @@ declare module "ext:deno_webidl/00_webidl.js" { * - `Failed to execute 'removeEventListener' on 'EventTarget'` */ prefix: string; - } - interface ValueConverterOpts extends ConverterOpts { /** * The context of this value error messages created by this converter. * Examples: @@ -26,7 +24,8 @@ declare module "ext:deno_webidl/00_webidl.js" { function makeException( ErrorType: any, message: string, - opts: ValueConverterOpts, + prefix: string, + context: string, ): any; interface IntConverterOpts extends ValueConverterOpts { /** @@ -261,7 +260,8 @@ declare module "ext:deno_webidl/00_webidl.js" { args: any[], thisArg: any, returnValueConverter: (v: any, opts: ValueConverterOpts) => T, - opts: ConverterOpts & { returnsPromise?: boolean }, + prefix: string, + returnsPromise?: boolean, ): T; /** From f5202840818cade33c65c0ae6c2ac17fb5732229 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Thu, 20 Apr 2023 13:24:28 +0900 Subject: [PATCH 005/320] refactor(ext/node): remove polyfills/_core.ts (#18766) --- ext/node/lib.rs | 1 - ext/node/polyfills/_core.ts | 83 -------------------- ext/node/polyfills/_next_tick.ts | 3 +- ext/node/polyfills/async_hooks.ts | 3 +- ext/node/polyfills/child_process.ts | 3 +- ext/node/polyfills/internal_binding/types.ts | 2 +- ext/node/polyfills/process.ts | 2 +- 7 files changed, 8 insertions(+), 89 deletions(-) delete mode 100644 ext/node/polyfills/_core.ts diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 64a2e083ab..65db6e45f4 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -269,7 +269,6 @@ deno_core::extension!(deno_node, "00_globals.js", "01_require.js", "02_init.js", - "_core.ts", "_events.mjs", "_fs/_fs_access.ts", "_fs/_fs_appendFile.ts", diff --git a/ext/node/polyfills/_core.ts b/ext/node/polyfills/_core.ts deleted file mode 100644 index af619378f3..0000000000 --- a/ext/node/polyfills/_core.ts +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -// This module provides an interface to `Deno.core`. For environments -// that don't have access to `Deno.core` some APIs are polyfilled, while -// some are unavailble and throw on call. -// Note: deno_std shouldn't use Deno.core namespace. We should minimize these -// usages. - -import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; - -// deno-lint-ignore no-explicit-any -let DenoCore: any; - -// deno-lint-ignore no-explicit-any -const { Deno } = globalThis as any; - -// @ts-ignore Deno.core is not defined in types -if (Deno?.[Deno.internal]?.core) { - // @ts-ignore Deno[Deno.internal].core is not defined in types - DenoCore = Deno[Deno.internal].core; -} else if (Deno?.core) { - // @ts-ignore Deno.core is not defined in types - DenoCore = Deno.core; -} else { - DenoCore = {}; -} - -export const core = { - runMicrotasks: DenoCore.runMicrotasks ?? function () { - throw new Error( - "Deno.core.runMicrotasks() is not supported in this environment", - ); - }, - setHasTickScheduled: DenoCore.setHasTickScheduled ?? function () { - throw new Error( - "Deno.core.setHasTickScheduled() is not supported in this environment", - ); - }, - hasTickScheduled: DenoCore.hasTickScheduled ?? function () { - throw new Error( - "Deno.core.hasTickScheduled() is not supported in this environment", - ); - }, - setNextTickCallback: DenoCore.setNextTickCallback ?? undefined, - setMacrotaskCallback: DenoCore.setMacrotaskCallback ?? function () { - throw new Error( - "Deno.core.setNextTickCallback() is not supported in this environment", - ); - }, - evalContext: DenoCore.evalContext ?? - function (_code: string, _filename: string) { - throw new Error( - "Deno.core.evalContext is not supported in this environment", - ); - }, - encode: DenoCore.encode ?? function (chunk: string): Uint8Array { - return new TextEncoder().encode(chunk); - }, - eventLoopHasMoreWork: DenoCore.eventLoopHasMoreWork ?? function (): boolean { - return false; - }, - isProxy: DenoCore.isProxy ?? 
function (): boolean { - return false; - }, - getPromiseDetails: DenoCore.getPromiseDetails ?? - function (_promise: Promise): [number, unknown] { - throw new Error( - "Deno.core.getPromiseDetails is not supported in this environment", - ); - }, - setPromiseHooks: DenoCore.setPromiseHooks ?? function () { - throw new Error( - "Deno.core.setPromiseHooks is not supported in this environment", - ); - }, - ops: DenoCore.ops ?? { - op_napi_open(_filename: string) { - throw new Error( - "Node API is not supported in this environment", - ); - }, - }, -}; diff --git a/ext/node/polyfills/_next_tick.ts b/ext/node/polyfills/_next_tick.ts index 45e972d6b5..fe1b687421 100644 --- a/ext/node/polyfills/_next_tick.ts +++ b/ext/node/polyfills/_next_tick.ts @@ -1,11 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent, Inc. and other Node contributors. -import { core } from "ext:deno_node/_core.ts"; import { validateFunction } from "ext:deno_node/internal/validators.mjs"; import { _exiting } from "ext:deno_node/_process/exiting.ts"; import { FixedQueue } from "ext:deno_node/internal/fixed_queue.ts"; +const { core } = globalThis.__bootstrap; + interface Tock { callback: (...args: Array) => void; args: Array; diff --git a/ext/node/polyfills/async_hooks.ts b/ext/node/polyfills/async_hooks.ts index 8111af73b2..d2c9390009 100644 --- a/ext/node/polyfills/async_hooks.ts +++ b/ext/node/polyfills/async_hooks.ts @@ -5,7 +5,8 @@ // https://github.com/cloudflare/workerd/blob/77fd0ed6ddba184414f0216508fc62b06e716cab/src/workerd/api/node/async-hooks.c++#L9 import { validateFunction } from "ext:deno_node/internal/validators.mjs"; -import { core } from "ext:deno_node/_core.ts"; + +const { core } = globalThis.__bootstrap; function assert(cond: boolean) { if (!cond) throw new Error("Assertion failed"); diff --git a/ext/node/polyfills/child_process.ts b/ext/node/polyfills/child_process.ts index 5ca760ed03..f731a0bf11 100644 --- 
a/ext/node/polyfills/child_process.ts +++ b/ext/node/polyfills/child_process.ts @@ -2,7 +2,6 @@ // This module implements 'child_process' module of Node.JS API. // ref: https://nodejs.org/api/child_process.html -import { core } from "ext:deno_node/_core.ts"; import { ChildProcess, ChildProcessOptions, @@ -44,6 +43,8 @@ import { kEmptyObject, } from "ext:deno_node/internal/util.mjs"; +const { core } = globalThis.__bootstrap; + const MAX_BUFFER = 1024 * 1024; type ForkOptions = ChildProcessOptions; diff --git a/ext/node/polyfills/internal_binding/types.ts b/ext/node/polyfills/internal_binding/types.ts index 943f5e31dc..d03c342ad9 100644 --- a/ext/node/polyfills/internal_binding/types.ts +++ b/ext/node/polyfills/internal_binding/types.ts @@ -21,7 +21,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import { core } from "ext:deno_node/_core.ts"; +const { core } = globalThis.__bootstrap; // https://tc39.es/ecma262/#sec-object.prototype.tostring const _toString = Object.prototype.toString; diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index eb5a491ae0..5e45fecfda 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -2,7 +2,7 @@ // Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license. 
const internals = globalThis.__bootstrap.internals; -import { core } from "ext:deno_node/_core.ts"; +const { core } = globalThis.__bootstrap; import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts"; import { EventEmitter } from "ext:deno_node/events.ts"; import { validateString } from "ext:deno_node/internal/validators.mjs"; From 1976504c632c78aaadbf24dc94e8ce5626bce9f1 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 20 Apr 2023 21:54:22 +0530 Subject: [PATCH 006/320] refactor(ext/websocket): use fastwebsockets client (#18725) --- Cargo.lock | 9 +- ext/http/lib.rs | 39 +-- ext/websocket/01_websocket.js | 16 +- ext/websocket/02_websocketstream.js | 2 +- ext/websocket/Cargo.toml | 2 +- ext/websocket/lib.rs | 372 +++++++++++++--------------- ext/websocket/server.rs | 194 --------------- 7 files changed, 183 insertions(+), 451 deletions(-) delete mode 100644 ext/websocket/server.rs diff --git a/Cargo.lock b/Cargo.lock index f301625015..29f02d5c42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1819,11 +1819,16 @@ dependencies = [ [[package]] name = "fastwebsockets" -version = "0.1.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d57e99c3fa6d0e1c6aeb84f4c904b26425128215fd318a251d8e785e373d43b6" +checksum = "99a248d92ac4e9048a30d147d7897eaaadd0a5230f11982ab7d6935d7d268902" dependencies = [ + "base64 0.21.0", "cc", + "hyper", + "pin-project", + "rand", + "sha1", "simdutf8", "tokio", "utf-8", diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 289e7bf0f9..43e3c130aa 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -1129,41 +1129,6 @@ async fn op_http_upgrade_early( Ok(rid) } -struct UpgradedStream(hyper::upgrade::Upgraded); -impl tokio::io::AsyncRead for UpgradedStream { - fn poll_read( - self: Pin<&mut Self>, - cx: &mut Context, - buf: &mut tokio::io::ReadBuf, - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_read(cx, buf) - } -} - -impl tokio::io::AsyncWrite for 
UpgradedStream { - fn poll_write( - self: Pin<&mut Self>, - cx: &mut Context, - buf: &[u8], - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_write(cx, buf) - } - fn poll_flush( - self: Pin<&mut Self>, - cx: &mut Context, - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_flush(cx) - } - fn poll_shutdown( - self: Pin<&mut Self>, - cx: &mut Context, - ) -> std::task::Poll> { - Pin::new(&mut self.get_mut().0).poll_shutdown(cx) - } -} - -impl deno_websocket::Upgraded for UpgradedStream {} - #[op] async fn op_http_upgrade_websocket( state: Rc>, @@ -1183,9 +1148,7 @@ async fn op_http_upgrade_websocket( }; let transport = hyper::upgrade::on(request).await?; - let ws_rid = - ws_create_server_stream(&state, Box::pin(UpgradedStream(transport))) - .await?; + let ws_rid = ws_create_server_stream(&state, transport).await?; Ok(ws_rid) } diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 2c6bf46b27..60378b6758 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -317,9 +317,7 @@ class WebSocket extends EventTarget { this[_bufferedAmount] += byteLength; PromisePrototypeThen( core.opAsync2( - this[_role] === SERVER - ? "op_server_ws_send_binary" - : "op_ws_send_binary", + "op_ws_send_binary", this[_rid], view, ), @@ -357,7 +355,7 @@ class WebSocket extends EventTarget { this[_bufferedAmount] += TypedArrayPrototypeGetByteLength(d); PromisePrototypeThen( core.opAsync2( - this[_role] === SERVER ? "op_server_ws_send_text" : "op_ws_send_text", + "op_ws_send_text", this[_rid], string, ), @@ -416,7 +414,7 @@ class WebSocket extends EventTarget { PromisePrototypeCatch( core.opAsync( - this[_role] === SERVER ? "op_server_ws_close" : "op_ws_close", + "op_ws_close", this[_rid], code, reason, @@ -441,7 +439,7 @@ class WebSocket extends EventTarget { async [_eventLoop]() { while (this[_readyState] !== CLOSED) { const { 0: kind, 1: value } = await core.opAsync2( - this[_role] === SERVER ? 
"op_server_ws_next_event" : "op_ws_next_event", + "op_ws_next_event", this[_rid], ); @@ -508,7 +506,7 @@ class WebSocket extends EventTarget { if (prevState === OPEN) { try { await core.opAsync( - this[_role] === SERVER ? "op_server_ws_close" : "op_ws_close", + "op_ws_close", this[_rid], code, value, @@ -537,7 +535,7 @@ class WebSocket extends EventTarget { this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { await core.opAsync( - this[_role] === SERVER ? "op_server_ws_send" : "op_ws_send", + "op_ws_send", this[_rid], { kind: "ping", @@ -548,7 +546,7 @@ class WebSocket extends EventTarget { this[_readyState] = CLOSING; const reason = "No response from ping frame."; await core.opAsync( - this[_role] === SERVER ? "op_server_ws_close" : "op_ws_close", + "op_ws_close", this[_rid], 1001, reason, diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 0ee7a70aa0..0d01e62eea 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -176,7 +176,7 @@ class WebSocketStream { create.rid, ); - if (kind > 6) { + if (kind > 5) { /* close */ break; } diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 2f5ed95b30..03cb3076a6 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -16,7 +16,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_tls.workspace = true -fastwebsockets = "0.1.3" +fastwebsockets = { version = "0.2.1", features = ["upgrade"] } http.workspace = true hyper.workspace = true serde.workspace = true diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 71f176070a..f63191a8ef 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -3,10 +3,6 @@ use deno_core::error::invalid_hostname; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::stream::SplitSink; -use deno_core::futures::stream::SplitStream; -use deno_core::futures::SinkExt; -use 
deno_core::futures::StreamExt; use deno_core::op; use deno_core::StringOrBuffer; @@ -21,42 +17,41 @@ use deno_core::Resource; use deno_core::ResourceId; use deno_core::ZeroCopyBuf; use deno_tls::create_client_config; +use http::header::CONNECTION; +use http::header::UPGRADE; use http::HeaderName; use http::HeaderValue; use http::Method; use http::Request; use http::Uri; +use hyper::upgrade::Upgraded; +use hyper::Body; +use hyper::Response; use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; +use std::cell::Cell; use std::cell::RefCell; use std::convert::TryFrom; use std::fmt; use std::path::PathBuf; -use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; -use tokio::io::AsyncRead; -use tokio::io::AsyncWrite; use tokio::net::TcpStream; use tokio_rustls::rustls::RootCertStore; use tokio_rustls::rustls::ServerName; use tokio_rustls::TlsConnector; -use tokio_tungstenite::client_async_with_config; -use tokio_tungstenite::tungstenite::handshake::client::Response; -use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode; -use tokio_tungstenite::tungstenite::protocol::CloseFrame; -use tokio_tungstenite::tungstenite::protocol::Message; -use tokio_tungstenite::tungstenite::protocol::WebSocketConfig; use tokio_tungstenite::MaybeTlsStream; -use tokio_tungstenite::WebSocketStream; + +use fastwebsockets::CloseCode; +use fastwebsockets::FragmentCollector; +use fastwebsockets::Frame; +use fastwebsockets::OpCode; +use fastwebsockets::Role; +use fastwebsockets::WebSocket; pub use tokio_tungstenite; // Re-export tokio_tungstenite -mod server; - -pub use server::ws_create_server_stream; - #[derive(Clone)] pub struct WsRootStore(pub Option); #[derive(Clone)] @@ -76,100 +71,6 @@ pub trait WebSocketPermissions { /// would override previously used alias. 
pub struct UnsafelyIgnoreCertificateErrors(Option>); -type ClientWsStream = WebSocketStream>; -type ServerWsStream = WebSocketStream>>; - -pub enum WebSocketStreamType { - Client { - tx: AsyncRefCell>, - rx: AsyncRefCell>, - }, - Server { - tx: AsyncRefCell>, - rx: AsyncRefCell>, - }, -} - -pub trait Upgraded: AsyncRead + AsyncWrite + Unpin {} - -pub struct WsStreamResource { - pub stream: WebSocketStreamType, - // When a `WsStreamResource` resource is closed, all pending 'read' ops are - // canceled, while 'write' ops are allowed to complete. Therefore only - // 'read' futures are attached to this cancel handle. - pub cancel: CancelHandle, -} - -impl WsStreamResource { - async fn send(self: &Rc, message: Message) -> Result<(), AnyError> { - use tokio_tungstenite::tungstenite::Error; - let res = match self.stream { - WebSocketStreamType::Client { .. } => { - let mut tx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { tx, .. } => tx, - WebSocketStreamType::Server { .. } => unreachable!(), - }) - .borrow_mut() - .await; - tx.send(message).await - } - WebSocketStreamType::Server { .. } => { - let mut tx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { .. } => unreachable!(), - WebSocketStreamType::Server { tx, .. } => tx, - }) - .borrow_mut() - .await; - tx.send(message).await - } - }; - - match res { - Ok(()) => Ok(()), - Err(Error::ConnectionClosed) => Ok(()), - Err(tokio_tungstenite::tungstenite::Error::Protocol( - tokio_tungstenite::tungstenite::error::ProtocolError::SendAfterClosing, - )) => Ok(()), - Err(err) => Err(err.into()), - } - } - - async fn next_message( - self: &Rc, - cancel: RcRef, - ) -> Result< - Option>, - AnyError, - > { - match &self.stream { - WebSocketStreamType::Client { .. } => { - let mut rx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { rx, .. } => rx, - WebSocketStreamType::Server { .. 
} => unreachable!(), - }) - .borrow_mut() - .await; - rx.next().or_cancel(cancel).await.map_err(AnyError::from) - } - WebSocketStreamType::Server { .. } => { - let mut rx = RcRef::map(self, |r| match &r.stream { - WebSocketStreamType::Client { .. } => unreachable!(), - WebSocketStreamType::Server { rx, .. } => rx, - }) - .borrow_mut() - .await; - rx.next().or_cancel(cancel).await.map_err(AnyError::from) - } - } - } -} - -impl Resource for WsStreamResource { - fn name(&self) -> Cow { - "webSocketStream".into() - } -} - pub struct WsCancelResource(Rc); impl Resource for WsCancelResource { @@ -182,6 +83,15 @@ impl Resource for WsCancelResource { } } +#[derive(Deserialize)] +#[serde(tag = "kind", content = "value", rename_all = "camelCase")] +pub enum SendValue { + Text(String), + Binary(ZeroCopyBuf), + Pong, + Ping, +} + // This op is needed because creating a WS instance in JavaScript is a sync // operation and should throw error when permissions are not fulfilled, // but actual op that connects WS is async. 
@@ -257,7 +167,21 @@ where let uri: Uri = url.parse()?; let mut request = Request::builder().method(Method::GET).uri(&uri); - request = request.header("User-Agent", user_agent); + let authority = uri.authority().unwrap().as_str(); + let host = authority + .find('@') + .map(|idx| authority.split_at(idx + 1).1) + .unwrap_or_else(|| authority); + request = request + .header("User-Agent", user_agent) + .header("Host", host) + .header(UPGRADE, "websocket") + .header(CONNECTION, "upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13"); if !protocols.is_empty() { request = request.header("Sec-WebSocket-Protocol", protocols); @@ -287,7 +211,7 @@ where } } - let request = request.body(())?; + let request = request.body(Body::empty())?; let domain = &uri.host().unwrap().to_string(); let port = &uri.port_u16().unwrap_or(match uri.scheme_str() { Some("wss") => 443, @@ -315,16 +239,9 @@ where _ => unreachable!(), }; - let client = client_async_with_config( - request, - socket, - Some(WebSocketConfig { - max_message_size: Some(128 << 20), - max_frame_size: Some(32 << 20), - ..Default::default() - }), - ); - let (stream, response): (ClientWsStream, Response) = + let client = fastwebsockets::handshake::client(request, socket); + + let (stream, response): (WebSocket, Response) = if let Some(cancel_resource) = cancel_resource { client.or_cancel(cancel_resource.0.to_owned()).await? 
} else { @@ -340,13 +257,9 @@ where state.borrow_mut().resource_table.close(cancel_rid).ok(); } - let (ws_tx, ws_rx) = stream.split(); - let resource = WsStreamResource { - stream: WebSocketStreamType::Client { - rx: AsyncRefCell::new(ws_rx), - tx: AsyncRefCell::new(ws_tx), - }, - cancel: Default::default(), + let resource = ServerWebSocket { + ws: AsyncRefCell::new(FragmentCollector::new(stream)), + closed: Rc::new(Cell::new(false)), }; let mut state = state.borrow_mut(); let rid = state.resource_table.add(resource); @@ -368,13 +281,60 @@ where }) } -#[derive(Deserialize)] -#[serde(tag = "kind", content = "value", rename_all = "camelCase")] -pub enum SendValue { - Text(String), - Binary(ZeroCopyBuf), - Pong, - Ping, +#[repr(u16)] +pub enum MessageKind { + Text = 0, + Binary = 1, + Pong = 2, + Ping = 3, + Error = 5, + Closed = 6, +} + +pub struct ServerWebSocket { + ws: AsyncRefCell>, + closed: Rc>, +} + +impl ServerWebSocket { + #[inline] + pub async fn write_frame( + self: Rc, + frame: Frame, + ) -> Result<(), AnyError> { + // SAFETY: fastwebsockets only needs a mutable reference to the WebSocket + // to populate the write buffer. We encounter an await point when writing + // to the socket after the frame has already been written to the buffer. 
+ let ws = unsafe { &mut *self.ws.as_ptr() }; + ws.write_frame(frame) + .await + .map_err(|err| type_error(err.to_string()))?; + Ok(()) + } +} + +impl Resource for ServerWebSocket { + fn name(&self) -> Cow { + "serverWebSocket".into() + } +} +pub async fn ws_create_server_stream( + state: &Rc>, + transport: Upgraded, +) -> Result { + let mut ws = WebSocket::after_handshake(transport, Role::Server); + ws.set_writev(true); + ws.set_auto_close(true); + ws.set_auto_pong(true); + + let ws_resource = ServerWebSocket { + ws: AsyncRefCell::new(FragmentCollector::new(ws)), + closed: Rc::new(Cell::new(false)), + }; + + let resource_table = &mut state.borrow_mut().resource_table; + let rid = resource_table.add(ws_resource); + Ok(rid) } #[op] @@ -386,9 +346,10 @@ pub async fn op_ws_send_binary( let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(Message::Binary(data.to_vec())).await?; - Ok(()) + .get::(rid)?; + resource + .write_frame(Frame::new(true, OpCode::Binary, None, data.to_vec())) + .await } #[op] @@ -400,9 +361,10 @@ pub async fn op_ws_send_text( let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(Message::Text(data)).await?; - Ok(()) + .get::(rid)?; + resource + .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) + .await } #[op] @@ -412,18 +374,21 @@ pub async fn op_ws_send( value: SendValue, ) -> Result<(), AnyError> { let msg = match value { - SendValue::Text(text) => Message::Text(text), - SendValue::Binary(buf) => Message::Binary(buf.to_vec()), - SendValue::Pong => Message::Pong(vec![]), - SendValue::Ping => Message::Ping(vec![]), + SendValue::Text(text) => { + Frame::new(true, OpCode::Text, None, text.into_bytes()) + } + SendValue::Binary(buf) => { + Frame::new(true, OpCode::Binary, None, buf.to_vec()) + } + SendValue::Pong => Frame::new(true, OpCode::Pong, None, vec![]), + SendValue::Ping => Frame::new(true, OpCode::Ping, None, vec![]), }; let resource = state .borrow_mut() 
.resource_table - .get::(rid)?; - resource.send(msg).await?; - Ok(()) + .get::(rid)?; + resource.write_frame(msg).await } #[op(deferred)] @@ -433,34 +398,21 @@ pub async fn op_ws_close( code: Option, reason: Option, ) -> Result<(), AnyError> { - let rid = rid; - let msg = Message::Close(code.map(|c| CloseFrame { - code: CloseCode::from(c), - reason: match reason { - Some(reason) => Cow::from(reason), - None => Default::default(), - }, - })); - let resource = state .borrow_mut() .resource_table - .get::(rid)?; - resource.send(msg).await?; + .get::(rid)?; + let frame = reason + .map(|reason| Frame::close(code.unwrap_or(1005), reason.as_bytes())) + .unwrap_or_else(|| Frame::close_raw(vec![])); + + let cell = Rc::clone(&resource.closed); + cell.set(true); + resource.write_frame(frame).await?; Ok(()) } -#[repr(u16)] -pub enum MessageKind { - Text = 0, - Binary = 1, - Pong = 2, - Ping = 3, - Error = 5, - Closed = 6, -} - -#[op] +#[op(deferred)] pub async fn op_ws_next_event( state: Rc>, rid: ResourceId, @@ -468,45 +420,58 @@ pub async fn op_ws_next_event( let resource = state .borrow_mut() .resource_table - .get::(rid)?; + .get::(rid)?; - let cancel = RcRef::map(&resource, |r| &r.cancel); - let val = resource.next_message(cancel).await?; - let res = match val { - Some(Ok(Message::Text(text))) => { - (MessageKind::Text as u16, StringOrBuffer::String(text)) + let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; + let val = match ws.read_frame().await { + Ok(val) => val, + Err(err) => { + // No message was received, socket closed while we waited. + // Try close the stream, ignoring any errors, and report closed status to JavaScript. 
+ if resource.closed.get() { + let _ = state.borrow_mut().resource_table.close(rid); + return Ok(( + MessageKind::Closed as u16, + StringOrBuffer::Buffer(vec![].into()), + )); + } + + return Ok(( + MessageKind::Error as u16, + StringOrBuffer::String(err.to_string()), + )); } - Some(Ok(Message::Binary(data))) => ( + }; + + let res = match val.opcode { + OpCode::Text => ( + MessageKind::Text as u16, + StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), + ), + OpCode::Binary => ( MessageKind::Binary as u16, - StringOrBuffer::Buffer(data.into()), + StringOrBuffer::Buffer(val.payload.into()), ), - Some(Ok(Message::Close(Some(frame)))) => ( - frame.code.into(), - StringOrBuffer::String(frame.reason.to_string()), - ), - Some(Ok(Message::Close(None))) => { - (1005, StringOrBuffer::String("".to_string())) + OpCode::Close => { + if val.payload.len() < 2 { + return Ok((1005, StringOrBuffer::String("".to_string()))); + } + + let close_code = + CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); + let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); + (close_code.into(), StringOrBuffer::String(reason)) } - Some(Ok(Message::Ping(_))) => ( + OpCode::Ping => ( MessageKind::Ping as u16, StringOrBuffer::Buffer(vec![].into()), ), - Some(Ok(Message::Pong(_))) => ( + OpCode::Pong => ( MessageKind::Pong as u16, StringOrBuffer::Buffer(vec![].into()), ), - Some(Err(e)) => ( - MessageKind::Error as u16, - StringOrBuffer::String(e.to_string()), - ), - None => { - // No message was received, presumably the socket closed while we waited. - // Try close the stream, ignoring any errors, and report closed status to JavaScript. 
- let _ = state.borrow_mut().resource_table.close(rid); - ( - MessageKind::Closed as u16, - StringOrBuffer::Buffer(vec![].into()), - ) + OpCode::Continuation => { + return Err(type_error("Unexpected continuation frame")) } }; Ok(res) @@ -523,11 +488,6 @@ deno_core::extension!(deno_websocket, op_ws_next_event, op_ws_send_binary, op_ws_send_text, - server::op_server_ws_send, - server::op_server_ws_close, - server::op_server_ws_next_event, - server::op_server_ws_send_binary, - server::op_server_ws_send_text, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { diff --git a/ext/websocket/server.rs b/ext/websocket/server.rs deleted file mode 100644 index 44bc07e59b..0000000000 --- a/ext/websocket/server.rs +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -use crate::MessageKind; -use crate::SendValue; -use crate::Upgraded; -use deno_core::error::type_error; -use deno_core::error::AnyError; -use deno_core::op; -use deno_core::AsyncRefCell; -use deno_core::OpState; -use deno_core::RcRef; -use deno_core::Resource; -use deno_core::ResourceId; -use deno_core::StringOrBuffer; -use deno_core::ZeroCopyBuf; -use std::borrow::Cow; -use std::cell::RefCell; -use std::pin::Pin; -use std::rc::Rc; - -use fastwebsockets::CloseCode; -use fastwebsockets::FragmentCollector; -use fastwebsockets::Frame; -use fastwebsockets::OpCode; -use fastwebsockets::WebSocket; - -pub struct ServerWebSocket { - ws: AsyncRefCell>>>, -} - -impl ServerWebSocket { - #[inline] - pub async fn write_frame( - self: Rc, - frame: Frame, - ) -> Result<(), AnyError> { - // SAFETY: fastwebsockets only needs a mutable reference to the WebSocket - // to populate the write buffer. We encounter an await point when writing - // to the socket after the frame has already been written to the buffer. 
- let ws = unsafe { &mut *self.ws.as_ptr() }; - ws.write_frame(frame) - .await - .map_err(|err| type_error(err.to_string()))?; - Ok(()) - } -} - -impl Resource for ServerWebSocket { - fn name(&self) -> Cow { - "serverWebSocket".into() - } -} -pub async fn ws_create_server_stream( - state: &Rc>, - transport: Pin>, -) -> Result { - let mut ws = WebSocket::after_handshake(transport); - ws.set_writev(false); - ws.set_auto_close(true); - ws.set_auto_pong(true); - - let ws_resource = ServerWebSocket { - ws: AsyncRefCell::new(FragmentCollector::new(ws)), - }; - - let resource_table = &mut state.borrow_mut().resource_table; - let rid = resource_table.add(ws_resource); - Ok(rid) -} - -#[op] -pub async fn op_server_ws_send_binary( - state: Rc>, - rid: ResourceId, - data: ZeroCopyBuf, -) -> Result<(), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource - .write_frame(Frame::new(true, OpCode::Binary, None, data.to_vec())) - .await -} - -#[op] -pub async fn op_server_ws_send_text( - state: Rc>, - rid: ResourceId, - data: String, -) -> Result<(), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource - .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) - .await -} - -#[op] -pub async fn op_server_ws_send( - state: Rc>, - rid: ResourceId, - value: SendValue, -) -> Result<(), AnyError> { - let msg = match value { - SendValue::Text(text) => { - Frame::new(true, OpCode::Text, None, text.into_bytes()) - } - SendValue::Binary(buf) => { - Frame::new(true, OpCode::Binary, None, buf.to_vec()) - } - SendValue::Pong => Frame::new(true, OpCode::Pong, None, vec![]), - SendValue::Ping => Frame::new(true, OpCode::Ping, None, vec![]), - }; - - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource.write_frame(msg).await -} - -#[op(deferred)] -pub async fn op_server_ws_close( - state: Rc>, - rid: ResourceId, - code: Option, - reason: Option, -) -> Result<(), 
AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - let frame = reason - .map(|reason| Frame::close(code.unwrap_or(1005), reason.as_bytes())) - .unwrap_or_else(|| Frame::close_raw(vec![])); - resource.write_frame(frame).await -} - -#[op(deferred)] -pub async fn op_server_ws_next_event( - state: Rc>, - rid: ResourceId, -) -> Result<(u16, StringOrBuffer), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; - let val = match ws.read_frame().await { - Ok(val) => val, - Err(err) => { - return Ok(( - MessageKind::Error as u16, - StringOrBuffer::String(err.to_string()), - )) - } - }; - - let res = match val.opcode { - OpCode::Text => ( - MessageKind::Text as u16, - StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), - ), - OpCode::Binary => ( - MessageKind::Binary as u16, - StringOrBuffer::Buffer(val.payload.into()), - ), - OpCode::Close => { - if val.payload.len() < 2 { - return Ok((1005, StringOrBuffer::String("".to_string()))); - } - - let close_code = - CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); - let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); - (close_code.into(), StringOrBuffer::String(reason)) - } - OpCode::Ping => ( - MessageKind::Ping as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Pong => ( - MessageKind::Pong as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Continuation => { - return Err(type_error("Unexpected continuation frame")) - } - }; - Ok(res) -} From 4e944dea1d6ad6cc819cbef278b59923eeaa2287 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 21 Apr 2023 12:25:02 +0530 Subject: [PATCH 007/320] fix(ext/websocket): upgrade fastwebsockets to 0.2.4 (#18791) Fixes https://github.com/denoland/deno/issues/18775 --- Cargo.lock | 4 +-- cli/tests/unit/websocket_test.ts | 51 +++++++++++++++++++++++++++++++- ext/websocket/Cargo.toml | 
2 +- ext/websocket/lib.rs | 18 ++++++++++- 4 files changed, 70 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29f02d5c42..cb5bc2f9d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1819,9 +1819,9 @@ dependencies = [ [[package]] name = "fastwebsockets" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99a248d92ac4e9048a30d147d7897eaaadd0a5230f11982ab7d6935d7d268902" +checksum = "fcf2f933f24f45831bd66580a8f9394e440f1f5a23806cf0d4d8b6649e1a01e9" dependencies = [ "base64 0.21.0", "cc", diff --git a/cli/tests/unit/websocket_test.ts b/cli/tests/unit/websocket_test.ts index 948e2add23..997d8f0df6 100644 --- a/cli/tests/unit/websocket_test.ts +++ b/cli/tests/unit/websocket_test.ts @@ -1,5 +1,11 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -import { assertEquals, assertThrows, deferred, fail } from "./test_util.ts"; +import { + assert, + assertEquals, + assertThrows, + deferred, + fail, +} from "./test_util.ts"; Deno.test({ permissions: "none" }, function websocketPermissionless() { assertThrows( @@ -82,3 +88,46 @@ Deno.test( ws.close(); }, ); + +// https://github.com/denoland/deno/issues/18775 +Deno.test({ + sanitizeOps: false, + sanitizeResources: false, +}, async function websocketDoubleClose() { + const promise = deferred(); + + const ac = new AbortController(); + const listeningPromise = deferred(); + + const server = Deno.serve({ + handler: (req) => { + const { response, socket } = Deno.upgradeWebSocket(req); + let called = false; + socket.onopen = () => socket.send("Hello"); + socket.onmessage = () => { + assert(!called); + called = true; + socket.send("bye"); + socket.close(); + }; + socket.onclose = () => ac.abort(); + socket.onerror = () => fail(); + return response; + }, + signal: ac.signal, + onListen: () => listeningPromise.resolve(), + hostname: "localhost", + port: 4247, + }); + + await listeningPromise; + + const ws = new 
WebSocket("ws://localhost:4247/"); + assertEquals(ws.url, "ws://localhost:4247/"); + ws.onerror = () => fail(); + ws.onmessage = () => ws.send("bye"); + ws.onclose = () => { + promise.resolve(); + }; + await Promise.all([promise, server]); +}); diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 03cb3076a6..a96b6cceb9 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -16,7 +16,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_tls.workspace = true -fastwebsockets = { version = "0.2.1", features = ["upgrade"] } +fastwebsockets = { version = "0.2.4", features = ["upgrade"] } http.workspace = true hyper.workspace = true serde.workspace = true diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index f63191a8ef..798856bc14 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -34,6 +34,7 @@ use std::cell::Cell; use std::cell::RefCell; use std::convert::TryFrom; use std::fmt; +use std::future::Future; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; @@ -239,7 +240,8 @@ where _ => unreachable!(), }; - let client = fastwebsockets::handshake::client(request, socket); + let client = + fastwebsockets::handshake::client(&LocalExecutor, request, socket); let (stream, response): (WebSocket, Response) = if let Some(cancel_resource) = cancel_resource { @@ -533,3 +535,17 @@ pub fn get_network_error_class_name(e: &AnyError) -> Option<&'static str> { e.downcast_ref::() .map(|_| "DOMExceptionNetworkError") } + +// Needed so hyper can use non Send futures +#[derive(Clone)] +struct LocalExecutor; + +impl hyper::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + tokio::task::spawn_local(fut); + } +} From 1d447cb7c3295941be85a05b455f45d89e119667 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 21 Apr 2023 15:22:45 +0200 Subject: [PATCH 008/320] refactor(core): remove ops from Deno.core.ops that are 
disabled (#18793) This commit changes how "disabled" ops behave. Instead of using "void" functions under the hood, they now explicitly throw errors saying that a given op doesn't exist. --- core/bindings.rs | 46 ++++++++++++++++++++++++++++++++----------- core/runtime.rs | 23 ++++++---------------- runtime/ops/os/mod.rs | 5 ++++- 3 files changed, 44 insertions(+), 30 deletions(-) diff --git a/core/bindings.rs b/core/bindings.rs index 5650b78f36..95e78b6cd3 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -160,30 +160,41 @@ pub(crate) fn initialize_context<'s>( if matches!(snapshot_options, SnapshotOptions::Load) { // Only register ops that have `force_registration` flag set to true, - // the remaining ones should already be in the snapshot. - for op_ctx in op_ctxs - .iter() - .filter(|op_ctx| op_ctx.decl.force_registration) - { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); + // the remaining ones should already be in the snapshot. Ignore ops that + // are disabled. + for op_ctx in op_ctxs { + if op_ctx.decl.enabled { + if op_ctx.decl.force_registration { + add_op_to_deno_core_ops(scope, ops_obj, op_ctx); + } + } else { + delete_op_from_deno_core_ops(scope, ops_obj, op_ctx) + } } } else if matches!(snapshot_options, SnapshotOptions::CreateFromExisting) { - // Register all ops, probing for which ones are already registered. + // Register all enabled ops, probing for which ones are already registered. for op_ctx in op_ctxs { let key = v8::String::new_external_onebyte_static( scope, op_ctx.decl.name.as_bytes(), ) .unwrap(); - if ops_obj.get(scope, key.into()).is_some() { - continue; + + if op_ctx.decl.enabled { + if ops_obj.get(scope, key.into()).is_some() { + continue; + } + add_op_to_deno_core_ops(scope, ops_obj, op_ctx); + } else { + delete_op_from_deno_core_ops(scope, ops_obj, op_ctx) } - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); } } else { - // In other cases register all ops unconditionally. 
+ // In other cases register all ops enabled unconditionally. for op_ctx in op_ctxs { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); + if op_ctx.decl.enabled { + add_op_to_deno_core_ops(scope, ops_obj, op_ctx); + } } } @@ -203,6 +214,17 @@ fn set_func( obj.set(scope, key.into(), val.into()); } +fn delete_op_from_deno_core_ops( + scope: &mut v8::HandleScope<'_>, + obj: v8::Local, + op_ctx: &OpCtx, +) { + let key = + v8::String::new_external_onebyte_static(scope, op_ctx.decl.name.as_bytes()) + .unwrap(); + obj.delete(scope, key.into()); +} + fn add_op_to_deno_core_ops( scope: &mut v8::HandleScope<'_>, obj: v8::Local, diff --git a/core/runtime.rs b/core/runtime.rs index 27fd824964..923caaea9a 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -15,8 +15,6 @@ use crate::modules::ModuleId; use crate::modules::ModuleLoadId; use crate::modules::ModuleLoader; use crate::modules::ModuleMap; -use crate::op_void_async; -use crate::op_void_sync; use crate::ops::*; use crate::realm::ContextState; use crate::realm::JsRealm; @@ -773,16 +771,6 @@ impl JsRuntime { name: d.name, ..macroware(d) }) - .map(|op| match op.enabled { - true => op, - false => OpDecl { - v8_fn_ptr: match op.is_async { - true => op_void_async::v8_fn_ptr as _, - false => op_void_sync::v8_fn_ptr as _, - }, - ..op - }, - }) .collect() } @@ -4223,11 +4211,12 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { extensions: vec![test_ext::init_ops()], ..Default::default() }); - let r = runtime + let err = runtime .execute_script_static("test.js", "Deno.core.ops.op_foo()") - .unwrap(); - let scope = &mut runtime.handle_scope(); - assert!(r.open(scope).is_undefined()); + .unwrap_err(); + assert!(err + .to_string() + .contains("TypeError: Deno.core.ops.op_foo is not a function")); } #[test] @@ -4327,7 +4316,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { if (Deno.core.ops.op_foo() !== 42) { throw new Error("Exptected op_foo() === 42"); } - if (Deno.core.ops.op_bar() 
!== undefined) { + if (typeof Deno.core.ops.op_bar !== "undefined") { throw new Error("Expected op_bar to be disabled") } "#, diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index 70a1263018..911cd327c0 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -57,7 +57,10 @@ deno_core::extension!( deno_os_worker, ops_fn = deno_ops, middleware = |op| match op.name { - "op_exit" | "op_set_exit_code" => op.disable(), + "op_exit" | "op_set_exit_code" => deno_core::OpDecl { + v8_fn_ptr: deno_core::op_void_sync::v8_fn_ptr as _, + ..op + }, _ => op, }, customizer = |ext: &mut deno_core::ExtensionBuilder| { From 065d8771adfae6aa75cdd367741468c823fbae4a Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Fri, 21 Apr 2023 14:32:37 +0100 Subject: [PATCH 009/320] fix(test): allow explicit undefined for boolean test options (#18786) Fixes #18784. --- cli/ops/testing.rs | 2 ++ cli/tests/unit/testing_test.ts | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/cli/ops/testing.rs b/cli/ops/testing.rs index e36d7e6114..3f9ade7c9e 100644 --- a/cli/ops/testing.rs +++ b/cli/ops/testing.rs @@ -100,7 +100,9 @@ struct TestInfo<'s> { #[serde(rename = "fn")] function: serde_v8::Value<'s>, name: String, + #[serde(default)] ignore: bool, + #[serde(default)] only: bool, location: TestLocation, } diff --git a/cli/tests/unit/testing_test.ts b/cli/tests/unit/testing_test.ts index 4e28d545c5..52e3baa133 100644 --- a/cli/tests/unit/testing_test.ts +++ b/cli/tests/unit/testing_test.ts @@ -147,3 +147,8 @@ Deno.test(async function parentOnTextContext(t1) { }); }); }); + +Deno.test("explicit undefined for boolean options", { + ignore: undefined, + only: undefined, +}, () => {}); From 4a33c349afd4b2728eb8c3c29676651353282d3b Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 21 Apr 2023 16:38:10 -0400 Subject: [PATCH 010/320] refactor: move some CJS and ESM code analysis to ext/node (#18789) --- cli/cache/node.rs | 8 - cli/module_loader.rs | 15 +- cli/node/analyze.rs | 634 
++++----------------------------------- cli/node/mod.rs | 6 +- cli/npm/mod.rs | 2 +- cli/npm/resolvers/mod.rs | 10 +- cli/proc_state.rs | 13 +- ext/node/analyze.rs | 564 ++++++++++++++++++++++++++++++++++ ext/node/crypto/x509.rs | 6 +- ext/node/lib.rs | 1 + 10 files changed, 655 insertions(+), 604 deletions(-) create mode 100644 ext/node/analyze.rs diff --git a/cli/cache/node.rs b/cli/cache/node.rs index f42f132fd4..298d81e2f1 100644 --- a/cli/cache/node.rs +++ b/cli/cache/node.rs @@ -42,14 +42,6 @@ pub struct NodeAnalysisCache { } impl NodeAnalysisCache { - #[cfg(test)] - pub fn new_in_memory() -> Self { - Self::new(CacheDB::in_memory( - &NODE_ANALYSIS_CACHE_DB, - crate::version::deno(), - )) - } - pub fn new(db: CacheDB) -> Self { Self { inner: NodeAnalysisCacheInner::new(db), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 06755bbf52..07fad6ffc6 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -11,9 +11,10 @@ use crate::graph_util::graph_valid_with_cli_options; use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::node; +use crate::node::CliCjsEsmCodeAnalyzer; use crate::node::CliNodeResolver; -use crate::node::NodeCodeTranslator; use crate::node::NodeResolution; +use crate::npm::CliRequireNpmResolver; use crate::proc_state::CjsResolutionStore; use crate::proc_state::FileWatcherReporter; use crate::proc_state::ProcState; @@ -49,7 +50,9 @@ use deno_graph::JsonModule; use deno_graph::Module; use deno_graph::Resolution; use deno_lockfile::Lockfile; +use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::RealFs; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use std::borrow::Cow; @@ -241,7 +244,8 @@ pub struct CliModuleLoader { emitter: Arc, graph_container: Arc, module_load_preparer: Arc, - node_code_translator: Arc, + node_code_translator: + Arc>, 
node_resolver: Arc, parsed_source_cache: Arc, resolver: Arc, @@ -385,17 +389,16 @@ impl CliModuleLoader { self.root_permissions.clone() }; // translate cjs to esm if it's cjs and inject node globals - self.node_code_translator.translate_cjs_to_esm( + self.node_code_translator.translate_cjs_to_esm::( specifier, - code, - MediaType::Cjs, + &code, &mut permissions, )? } else { // only inject node globals for esm self .node_code_translator - .esm_code_with_node_globals(specifier, code)? + .esm_code_with_node_globals(specifier, &code)? }; ModuleCodeSource { code: code.into(), diff --git a/cli/node/analyze.rs b/cli/node/analyze.rs index f93e9fa910..27818639ef 100644 --- a/cli/node/analyze.rs +++ b/cli/node/analyze.rs @@ -1,11 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::collections::HashSet; -use std::collections::VecDeque; -use std::fmt::Write; -use std::path::Path; -use std::path::PathBuf; -use std::sync::Arc; use deno_ast::swc::common::SyntaxContext; use deno_ast::view::Node; @@ -15,195 +10,35 @@ use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_ast::ParsedSource; use deno_ast::SourceRanged; -use deno_core::anyhow::anyhow; use deno_core::error::AnyError; -use deno_runtime::deno_node::package_exports_resolve; -use deno_runtime::deno_node::NodeModuleKind; -use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeResolutionMode; -use deno_runtime::deno_node::PackageJson; -use deno_runtime::deno_node::PathClean; -use deno_runtime::deno_node::RealFs; -use deno_runtime::deno_node::RequireNpmResolver; -use deno_runtime::deno_node::NODE_GLOBAL_THIS_NAME; -use once_cell::sync::Lazy; +use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis; +use deno_runtime::deno_node::analyze::CjsEsmCodeAnalyzer; use crate::cache::NodeAnalysisCache; -use crate::file_fetcher::FileFetcher; -use crate::npm::NpmPackageResolver; -static NODE_GLOBALS: &[&str] = &[ - "Buffer", - "clearImmediate", - 
"clearInterval", - "clearTimeout", - "console", - "global", - "process", - "setImmediate", - "setInterval", - "setTimeout", -]; - -pub struct NodeCodeTranslator { - analysis_cache: NodeAnalysisCache, - file_fetcher: Arc, - npm_resolver: Arc, +pub struct CliCjsEsmCodeAnalyzer { + cache: NodeAnalysisCache, } -impl NodeCodeTranslator { - pub fn new( - analysis_cache: NodeAnalysisCache, - file_fetcher: Arc, - npm_resolver: Arc, - ) -> Self { - Self { - analysis_cache, - file_fetcher, - npm_resolver, - } +impl CliCjsEsmCodeAnalyzer { + pub fn new(cache: NodeAnalysisCache) -> Self { + Self { cache } } - pub fn esm_code_with_node_globals( + fn inner_cjs_analysis( &self, specifier: &ModuleSpecifier, - code: String, - ) -> Result { - esm_code_with_node_globals(&self.analysis_cache, specifier, code) - } - - /// Translates given CJS module into ESM. This function will perform static - /// analysis on the file to find defined exports and reexports. - /// - /// For all discovered reexports the analysis will be performed recursively. - /// - /// If successful a source code for equivalent ES module is returned. 
- pub fn translate_cjs_to_esm( - &self, - specifier: &ModuleSpecifier, - code: String, - media_type: MediaType, - permissions: &mut dyn NodePermissions, - ) -> Result { - let mut temp_var_count = 0; - let mut handled_reexports: HashSet = HashSet::default(); - - let mut source = vec![ - r#"import {createRequire as __internalCreateRequire} from "node:module"; - const require = __internalCreateRequire(import.meta.url);"# - .to_string(), - ]; - - let analysis = - self.perform_cjs_analysis(specifier.as_str(), media_type, code)?; - - let mut all_exports = analysis - .exports - .iter() - .map(|s| s.to_string()) - .collect::>(); - - // (request, referrer) - let mut reexports_to_handle = VecDeque::new(); - for reexport in analysis.reexports { - reexports_to_handle.push_back((reexport, specifier.clone())); - } - - while let Some((reexport, referrer)) = reexports_to_handle.pop_front() { - if handled_reexports.contains(&reexport) { - continue; - } - - handled_reexports.insert(reexport.to_string()); - - // First, resolve relate reexport specifier - let resolved_reexport = self.resolve( - &reexport, - &referrer, - // FIXME(bartlomieju): check if these conditions are okay, probably - // should be `deno-require`, because `deno` is already used in `esm_resolver.rs` - &["deno", "require", "default"], - NodeResolutionMode::Execution, - permissions, - )?; - let reexport_specifier = - ModuleSpecifier::from_file_path(resolved_reexport).unwrap(); - // Second, read the source code from disk - let reexport_file = self - .file_fetcher - .get_source(&reexport_specifier) - .ok_or_else(|| { - anyhow!( - "Could not find '{}' ({}) referenced from {}", - reexport, - reexport_specifier, - referrer - ) - })?; - - { - let analysis = self.perform_cjs_analysis( - reexport_specifier.as_str(), - reexport_file.media_type, - reexport_file.source.to_string(), - )?; - - for reexport in analysis.reexports { - reexports_to_handle.push_back((reexport, reexport_specifier.clone())); - } - - all_exports.extend( - 
analysis - .exports - .into_iter() - .filter(|e| e.as_str() != "default"), - ); - } - } - - source.push(format!( - "const mod = require(\"{}\");", - specifier - .to_file_path() - .unwrap() - .to_str() - .unwrap() - .replace('\\', "\\\\") - .replace('\'', "\\\'") - .replace('\"', "\\\"") - )); - - for export in &all_exports { - if export.as_str() != "default" { - add_export( - &mut source, - export, - &format!("mod[\"{export}\"]"), - &mut temp_var_count, - ); - } - } - - source.push("export default mod;".to_string()); - - let translated_source = source.join("\n"); - Ok(translated_source) - } - - fn perform_cjs_analysis( - &self, - specifier: &str, - media_type: MediaType, - code: String, + source: &str, ) -> Result { - let source_hash = NodeAnalysisCache::compute_source_hash(&code); + let source_hash = NodeAnalysisCache::compute_source_hash(source); if let Some(analysis) = self - .analysis_cache - .get_cjs_analysis(specifier, &source_hash) + .cache + .get_cjs_analysis(specifier.as_str(), &source_hash) { return Ok(analysis); } + let media_type = MediaType::from_specifier(specifier); if media_type == MediaType::Json { return Ok(CjsAnalysis { exports: vec![], @@ -213,7 +48,7 @@ impl NodeCodeTranslator { let parsed_source = deno_ast::parse_script(deno_ast::ParseParams { specifier: specifier.to_string(), - text_info: deno_ast::SourceTextInfo::new(code.into()), + text_info: deno_ast::SourceTextInfo::new(source.into()), media_type, capture_tokens: true, scope_analysis: false, @@ -221,175 +56,61 @@ impl NodeCodeTranslator { })?; let analysis = parsed_source.analyze_cjs(); self - .analysis_cache - .set_cjs_analysis(specifier, &source_hash, &analysis); + .cache + .set_cjs_analysis(specifier.as_str(), &source_hash, &analysis); Ok(analysis) } +} - fn resolve( +impl CjsEsmCodeAnalyzer for CliCjsEsmCodeAnalyzer { + fn analyze_cjs( &self, - specifier: &str, - referrer: &ModuleSpecifier, - conditions: &[&str], - mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, - ) 
-> Result { - if specifier.starts_with('/') { - todo!(); - } - - let referrer_path = referrer.to_file_path().unwrap(); - if specifier.starts_with("./") || specifier.starts_with("../") { - if let Some(parent) = referrer_path.parent() { - return file_extension_probe(parent.join(specifier), &referrer_path); - } else { - todo!(); - } - } - - // We've got a bare specifier or maybe bare_specifier/blah.js" - - let (package_specifier, package_subpath) = - parse_specifier(specifier).unwrap(); - - // todo(dsherret): use not_found error on not found here - let resolver = self.npm_resolver.as_require_npm_resolver(); - let module_dir = resolver.resolve_package_folder_from_package( - package_specifier.as_str(), - &referrer_path, - mode, - )?; - - let package_json_path = module_dir.join("package.json"); - if package_json_path.exists() { - let package_json = PackageJson::load::( - &self.npm_resolver.as_require_npm_resolver(), - permissions, - package_json_path.clone(), - )?; - - if let Some(exports) = &package_json.exports { - return package_exports_resolve::( - &package_json_path, - package_subpath, - exports, - referrer, - NodeModuleKind::Esm, - conditions, - mode, - &self.npm_resolver.as_require_npm_resolver(), - permissions, - ); - } - - // old school - if package_subpath != "." 
{ - let d = module_dir.join(package_subpath); - if let Ok(m) = d.metadata() { - if m.is_dir() { - // subdir might have a package.json that specifies the entrypoint - let package_json_path = d.join("package.json"); - if package_json_path.exists() { - let package_json = PackageJson::load::( - &self.npm_resolver.as_require_npm_resolver(), - permissions, - package_json_path, - )?; - if let Some(main) = package_json.main(NodeModuleKind::Cjs) { - return Ok(d.join(main).clean()); - } - } - - return Ok(d.join("index.js").clean()); - } - } - return file_extension_probe(d, &referrer_path); - } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { - return Ok(module_dir.join(main).clean()); - } else { - return Ok(module_dir.join("index.js").clean()); - } - } - Err(not_found(specifier, &referrer_path)) - } -} - -fn esm_code_with_node_globals( - analysis_cache: &NodeAnalysisCache, - specifier: &ModuleSpecifier, - code: String, -) -> Result { - // TODO(dsherret): this code is way more inefficient than it needs to be. - // - // In the future, we should disable capturing tokens & scope analysis - // and instead only use swc's APIs to go through the portions of the tree - // that we know will affect the global scope while still ensuring that - // `var` decls are taken into consideration. 
- let source_hash = NodeAnalysisCache::compute_source_hash(&code); - let text_info = deno_ast::SourceTextInfo::from_string(code); - let top_level_decls = if let Some(decls) = - analysis_cache.get_esm_analysis(specifier.as_str(), &source_hash) - { - HashSet::from_iter(decls) - } else { - let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { - specifier: specifier.to_string(), - text_info: text_info.clone(), - media_type: deno_ast::MediaType::from_specifier(specifier), - capture_tokens: true, - scope_analysis: true, - maybe_syntax: None, - })?; - let top_level_decls = analyze_top_level_decls(&parsed_source)?; - analysis_cache.set_esm_analysis( - specifier.as_str(), - &source_hash, - &top_level_decls.clone().into_iter().collect(), - ); - top_level_decls - }; - - Ok(esm_code_from_top_level_decls( - text_info.text_str(), - &top_level_decls, - )) -} - -fn esm_code_from_top_level_decls( - file_text: &str, - top_level_decls: &HashSet, -) -> String { - let mut globals = Vec::with_capacity(NODE_GLOBALS.len()); - let has_global_this = top_level_decls.contains("globalThis"); - for global in NODE_GLOBALS.iter() { - if !top_level_decls.contains(&global.to_string()) { - globals.push(*global); - } + specifier: &ModuleSpecifier, + source: &str, + ) -> Result { + let analysis = self.inner_cjs_analysis(specifier, source)?; + Ok(ExtNodeCjsAnalysis { + exports: analysis.exports, + reexports: analysis.reexports, + }) } - let mut result = String::new(); - let global_this_expr = NODE_GLOBAL_THIS_NAME.as_str(); - let global_this_expr = if has_global_this { - global_this_expr - } else { - write!(result, "var globalThis = {global_this_expr};").unwrap(); - "globalThis" - }; - for global in globals { - write!(result, "var {global} = {global_this_expr}.{global};").unwrap(); + fn analyze_esm_top_level_decls( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result, AnyError> { + // TODO(dsherret): this code is way more inefficient than it needs to be. 
+ // + // In the future, we should disable capturing tokens & scope analysis + // and instead only use swc's APIs to go through the portions of the tree + // that we know will affect the global scope while still ensuring that + // `var` decls are taken into consideration. + let source_hash = NodeAnalysisCache::compute_source_hash(source); + if let Some(decls) = self + .cache + .get_esm_analysis(specifier.as_str(), &source_hash) + { + Ok(HashSet::from_iter(decls)) + } else { + let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { + specifier: specifier.to_string(), + text_info: deno_ast::SourceTextInfo::from_string(source.to_string()), + media_type: deno_ast::MediaType::from_specifier(specifier), + capture_tokens: true, + scope_analysis: true, + maybe_syntax: None, + })?; + let top_level_decls = analyze_top_level_decls(&parsed_source)?; + self.cache.set_esm_analysis( + specifier.as_str(), + &source_hash, + &top_level_decls.clone().into_iter().collect::>(), + ); + Ok(top_level_decls) + } } - - // strip the shebang - let file_text = if file_text.starts_with("#!/") { - let start_index = file_text.find('\n').unwrap_or(file_text.len()); - &file_text[start_index..] 
- } else { - file_text - }; - result.push_str(file_text); - - result } fn analyze_top_level_decls( @@ -455,236 +176,3 @@ fn is_local_declaration_ident(node: Node) -> bool { false } } - -static RESERVED_WORDS: Lazy> = Lazy::new(|| { - HashSet::from([ - "break", - "case", - "catch", - "class", - "const", - "continue", - "debugger", - "default", - "delete", - "do", - "else", - "export", - "extends", - "false", - "finally", - "for", - "function", - "if", - "import", - "in", - "instanceof", - "new", - "null", - "return", - "super", - "switch", - "this", - "throw", - "true", - "try", - "typeof", - "var", - "void", - "while", - "with", - "yield", - "let", - "enum", - "implements", - "interface", - "package", - "private", - "protected", - "public", - "static", - ]) -}); - -fn add_export( - source: &mut Vec, - name: &str, - initializer: &str, - temp_var_count: &mut usize, -) { - fn is_valid_var_decl(name: &str) -> bool { - // it's ok to be super strict here - name - .chars() - .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$') - } - - // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object, - // but it might not be necessary here since our analysis is more detailed? 
- if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) { - *temp_var_count += 1; - // we can't create an identifier with a reserved word or invalid identifier name, - // so assign it to a temporary variable that won't have a conflict, then re-export - // it as a string - source.push(format!( - "const __deno_export_{temp_var_count}__ = {initializer};" - )); - source.push(format!( - "export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};" - )); - } else { - source.push(format!("export const {name} = {initializer};")); - } -} - -fn parse_specifier(specifier: &str) -> Option<(String, String)> { - let mut separator_index = specifier.find('/'); - let mut valid_package_name = true; - // let mut is_scoped = false; - if specifier.is_empty() { - valid_package_name = false; - } else if specifier.starts_with('@') { - // is_scoped = true; - if let Some(index) = separator_index { - separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1); - } else { - valid_package_name = false; - } - } - - let package_name = if let Some(index) = separator_index { - specifier[0..index].to_string() - } else { - specifier.to_string() - }; - - // Package name cannot have leading . and cannot have percent-encoding or separators. 
- for ch in package_name.chars() { - if ch == '%' || ch == '\\' { - valid_package_name = false; - break; - } - } - - if !valid_package_name { - return None; - } - - let package_subpath = if let Some(index) = separator_index { - format!(".{}", specifier.chars().skip(index).collect::()) - } else { - ".".to_string() - }; - - Some((package_name, package_subpath)) -} - -fn file_extension_probe( - p: PathBuf, - referrer: &Path, -) -> Result { - let p = p.clean(); - if p.exists() { - let file_name = p.file_name().unwrap(); - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if p_js.exists() && p_js.is_file() { - return Ok(p_js); - } else if p.is_dir() { - return Ok(p.join("index.js")); - } else { - return Ok(p); - } - } else if let Some(file_name) = p.file_name() { - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if p_js.exists() && p_js.is_file() { - return Ok(p_js); - } - } - Err(not_found(&p.to_string_lossy(), referrer)) -} - -fn not_found(path: &str, referrer: &Path) -> AnyError { - let msg = format!( - "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", - path, - referrer.to_string_lossy() - ); - std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_esm_code_with_node_globals() { - let r = esm_code_with_node_globals( - &NodeAnalysisCache::new_in_memory(), - &ModuleSpecifier::parse("https://example.com/foo/bar.js").unwrap(), - "export const x = 1;".to_string(), - ) - .unwrap(); - assert!(r.contains(&format!( - "var globalThis = {};", - NODE_GLOBAL_THIS_NAME.as_str() - ))); - assert!(r.contains("var process = globalThis.process;")); - assert!(r.contains("export const x = 1;")); - } - - #[test] - fn test_esm_code_with_node_globals_with_shebang() { - let r = esm_code_with_node_globals( - &NodeAnalysisCache::new_in_memory(), - &ModuleSpecifier::parse("https://example.com/foo/bar.js").unwrap(), - "#!/usr/bin/env 
node\nexport const x = 1;".to_string(), - ) - .unwrap(); - assert_eq!( - r, - format!( - concat!( - "var globalThis = {}", - ";var Buffer = globalThis.Buffer;", - "var clearImmediate = globalThis.clearImmediate;var clearInterval = globalThis.clearInterval;", - "var clearTimeout = globalThis.clearTimeout;var console = globalThis.console;", - "var global = globalThis.global;var process = globalThis.process;", - "var setImmediate = globalThis.setImmediate;var setInterval = globalThis.setInterval;", - "var setTimeout = globalThis.setTimeout;\n", - "export const x = 1;" - ), - NODE_GLOBAL_THIS_NAME.as_str(), - ) - ); - } - - #[test] - fn test_add_export() { - let mut temp_var_count = 0; - let mut source = vec![]; - - let exports = vec!["static", "server", "app", "dashed-export"]; - for export in exports { - add_export(&mut source, export, "init", &mut temp_var_count); - } - assert_eq!( - source, - vec![ - "const __deno_export_1__ = init;".to_string(), - "export { __deno_export_1__ as \"static\" };".to_string(), - "export const server = init;".to_string(), - "export const app = init;".to_string(), - "const __deno_export_2__ = init;".to_string(), - "export { __deno_export_2__ as \"dashed-export\" };".to_string(), - ] - ) - } - - #[test] - fn test_parse_specifier() { - assert_eq!( - parse_specifier("@some-package/core/actions"), - Some(("@some-package/core".to_string(), "./actions".to_string())) - ); - } -} diff --git a/cli/node/mod.rs b/cli/node/mod.rs index 01216f50c1..eb584879e8 100644 --- a/cli/node/mod.rs +++ b/cli/node/mod.rs @@ -33,14 +33,14 @@ use deno_semver::npm::NpmPackageNv; use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageReqReference; +use crate::npm::CliRequireNpmResolver; use crate::npm::NpmPackageResolver; use crate::npm::NpmResolution; -use crate::npm::RequireNpmPackageResolver; use crate::util::fs::canonicalize_path_maybe_not_exists; mod analyze; -pub use analyze::NodeCodeTranslator; +pub use analyze::CliCjsEsmCodeAnalyzer; 
#[derive(Debug)] pub enum NodeResolution { @@ -116,7 +116,7 @@ pub fn resolve_builtin_node_module(module_name: &str) -> Result { pub struct CliNodeResolver { npm_resolution: Arc, npm_resolver: Arc, - require_npm_resolver: RequireNpmPackageResolver, + require_npm_resolver: CliRequireNpmResolver, } impl CliNodeResolver { diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 8a38ee0795..8f6ac77bc6 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -13,6 +13,6 @@ pub use installer::PackageJsonDepsInstaller; pub use registry::CliNpmRegistryApi; pub use resolution::NpmResolution; pub use resolvers::create_npm_fs_resolver; +pub use resolvers::CliRequireNpmResolver; pub use resolvers::NpmPackageResolver; pub use resolvers::NpmProcessState; -pub use resolvers::RequireNpmPackageResolver; diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 5d3eb52b0d..f693d3d23a 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -229,17 +229,15 @@ impl NpmPackageResolver { Ok(()) } - pub fn as_require_npm_resolver( - self: &Arc, - ) -> RequireNpmPackageResolver { - RequireNpmPackageResolver(self.clone()) + pub fn as_require_npm_resolver(self: &Arc) -> CliRequireNpmResolver { + CliRequireNpmResolver(self.clone()) } } #[derive(Debug)] -pub struct RequireNpmPackageResolver(Arc); +pub struct CliRequireNpmResolver(Arc); -impl RequireNpmResolver for RequireNpmPackageResolver { +impl RequireNpmResolver for CliRequireNpmResolver { fn resolve_package_folder_from_package( &self, specifier: &str, diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 75466e5fe8..bfe45bc861 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -17,10 +17,11 @@ use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::http_util::HttpClient; use crate::module_loader::ModuleLoadPreparer; +use crate::node::CliCjsEsmCodeAnalyzer; use crate::node::CliNodeResolver; -use crate::node::NodeCodeTranslator; use crate::npm::create_npm_fs_resolver; 
use crate::npm::CliNpmRegistryApi; +use crate::npm::CliRequireNpmResolver; use crate::npm::NpmCache; use crate::npm::NpmPackageResolver; use crate::npm::NpmResolution; @@ -37,6 +38,7 @@ use deno_core::ModuleSpecifier; use deno_core::SharedArrayBufferStore; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; +use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; @@ -75,7 +77,8 @@ pub struct Inner { maybe_file_watcher_reporter: Option, pub module_graph_builder: Arc, pub module_load_preparer: Arc, - pub node_code_translator: Arc, + pub node_code_translator: + Arc>, pub node_resolver: Arc, pub npm_api: Arc, pub npm_cache: Arc, @@ -304,10 +307,10 @@ impl ProcState { let file_fetcher = Arc::new(file_fetcher); let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db(&dir)); + let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); let node_code_translator = Arc::new(NodeCodeTranslator::new( - node_analysis_cache, - file_fetcher.clone(), - npm_resolver.clone(), + cjs_esm_analyzer, + npm_resolver.as_require_npm_resolver(), )); let node_resolver = Arc::new(CliNodeResolver::new( npm_resolution.clone(), diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs new file mode 100644 index 0000000000..03bf41995c --- /dev/null +++ b/ext/node/analyze.rs @@ -0,0 +1,564 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use std::collections::HashSet; +use std::collections::VecDeque; +use std::fmt::Write; +use std::path::Path; +use std::path::PathBuf; + +use deno_core::anyhow::Context; +use deno_core::ModuleSpecifier; +use once_cell::sync::Lazy; + +use deno_core::error::AnyError; + +use crate::package_exports_resolve; +use crate::NodeFs; +use crate::NodeModuleKind; +use crate::NodePermissions; +use crate::NodeResolutionMode; +use crate::PackageJson; +use crate::PathClean; +use crate::RequireNpmResolver; +use crate::NODE_GLOBAL_THIS_NAME; + +static NODE_GLOBALS: &[&str] = &[ + "Buffer", + "clearImmediate", + "clearInterval", + "clearTimeout", + "console", + "global", + "process", + "setImmediate", + "setInterval", + "setTimeout", +]; + +#[derive(Debug, Clone)] +pub struct CjsAnalysis { + pub exports: Vec, + pub reexports: Vec, +} + +/// Code analyzer for CJS and ESM files. +pub trait CjsEsmCodeAnalyzer { + /// Analyzes CommonJs code for exports and reexports, which is + /// then used to determine the wrapper ESM module exports. + fn analyze_cjs( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result; + + /// Analyzes ESM code for top level declarations. This is used + /// to help inform injecting node specific globals into Node ESM + /// code. For example, if a top level `setTimeout` function exists + /// then we don't want to inject a `setTimeout` declaration. + /// + /// Note: This will go away in the future once we do this all in v8. 
+ fn analyze_esm_top_level_decls( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result, AnyError>; +} + +pub struct NodeCodeTranslator< + TCjsEsmCodeAnalyzer: CjsEsmCodeAnalyzer, + TRequireNpmResolver: RequireNpmResolver, +> { + cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, + npm_resolver: TRequireNpmResolver, +} + +impl< + TCjsEsmCodeAnalyzer: CjsEsmCodeAnalyzer, + TRequireNpmResolver: RequireNpmResolver, + > NodeCodeTranslator +{ + pub fn new( + cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, + npm_resolver: TRequireNpmResolver, + ) -> Self { + Self { + cjs_esm_code_analyzer, + npm_resolver, + } + } + + /// Resolves the code to be used when executing Node specific ESM code. + /// + /// Note: This will go away in the future once we do this all in v8. + pub fn esm_code_with_node_globals( + &self, + specifier: &ModuleSpecifier, + source: &str, + ) -> Result { + let top_level_decls = self + .cjs_esm_code_analyzer + .analyze_esm_top_level_decls(specifier, source)?; + Ok(esm_code_from_top_level_decls(source, &top_level_decls)) + } + + /// Translates given CJS module into ESM. This function will perform static + /// analysis on the file to find defined exports and reexports. + /// + /// For all discovered reexports the analysis will be performed recursively. + /// + /// If successful a source code for equivalent ES module is returned. 
+ pub fn translate_cjs_to_esm( + &self, + specifier: &ModuleSpecifier, + source: &str, + permissions: &mut dyn NodePermissions, + ) -> Result { + let mut temp_var_count = 0; + let mut handled_reexports: HashSet = HashSet::default(); + + let analysis = self.cjs_esm_code_analyzer.analyze_cjs(specifier, source)?; + + let mut source = vec![ + r#"import {createRequire as __internalCreateRequire} from "node:module"; + const require = __internalCreateRequire(import.meta.url);"# + .to_string(), + ]; + + let mut all_exports = analysis + .exports + .iter() + .map(|s| s.to_string()) + .collect::>(); + + // (request, referrer) + let mut reexports_to_handle = VecDeque::new(); + for reexport in analysis.reexports { + reexports_to_handle.push_back((reexport, specifier.clone())); + } + + while let Some((reexport, referrer)) = reexports_to_handle.pop_front() { + if handled_reexports.contains(&reexport) { + continue; + } + + handled_reexports.insert(reexport.to_string()); + + // First, resolve relate reexport specifier + let resolved_reexport = self.resolve::( + &reexport, + &referrer, + // FIXME(bartlomieju): check if these conditions are okay, probably + // should be `deno-require`, because `deno` is already used in `esm_resolver.rs` + &["deno", "require", "default"], + NodeResolutionMode::Execution, + permissions, + )?; + // Second, read the source code from disk + let reexport_specifier = + ModuleSpecifier::from_file_path(&resolved_reexport).unwrap(); + let reexport_file_text = Fs::read_to_string(&resolved_reexport) + .with_context(|| { + format!( + "Could not find '{}' ({}) referenced from {}", + reexport, reexport_specifier, referrer + ) + })?; + { + let analysis = self + .cjs_esm_code_analyzer + .analyze_cjs(&reexport_specifier, &reexport_file_text)?; + + for reexport in analysis.reexports { + reexports_to_handle.push_back((reexport, reexport_specifier.clone())); + } + + all_exports.extend( + analysis + .exports + .into_iter() + .filter(|e| e.as_str() != "default"), + ); + } 
+ } + + source.push(format!( + "const mod = require(\"{}\");", + specifier + .to_file_path() + .unwrap() + .to_str() + .unwrap() + .replace('\\', "\\\\") + .replace('\'', "\\\'") + .replace('\"', "\\\"") + )); + + for export in &all_exports { + if export.as_str() != "default" { + add_export( + &mut source, + export, + &format!("mod[\"{export}\"]"), + &mut temp_var_count, + ); + } + } + + source.push("export default mod;".to_string()); + + let translated_source = source.join("\n"); + Ok(translated_source) + } + + fn resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result { + if specifier.starts_with('/') { + todo!(); + } + + let referrer_path = referrer.to_file_path().unwrap(); + if specifier.starts_with("./") || specifier.starts_with("../") { + if let Some(parent) = referrer_path.parent() { + return file_extension_probe::( + parent.join(specifier), + &referrer_path, + ); + } else { + todo!(); + } + } + + // We've got a bare specifier or maybe bare_specifier/blah.js" + + let (package_specifier, package_subpath) = + parse_specifier(specifier).unwrap(); + + // todo(dsherret): use not_found error on not found here + let module_dir = self.npm_resolver.resolve_package_folder_from_package( + package_specifier.as_str(), + &referrer_path, + mode, + )?; + + let package_json_path = module_dir.join("package.json"); + if Fs::exists(&package_json_path) { + let package_json = PackageJson::load::( + &self.npm_resolver, + permissions, + package_json_path.clone(), + )?; + + if let Some(exports) = &package_json.exports { + return package_exports_resolve::( + &package_json_path, + package_subpath, + exports, + referrer, + NodeModuleKind::Esm, + conditions, + mode, + &self.npm_resolver, + permissions, + ); + } + + // old school + if package_subpath != "." 
{ + let d = module_dir.join(package_subpath); + if Fs::is_dir(&d) { + // subdir might have a package.json that specifies the entrypoint + let package_json_path = d.join("package.json"); + if Fs::exists(&package_json_path) { + let package_json = PackageJson::load::( + &self.npm_resolver, + permissions, + package_json_path, + )?; + if let Some(main) = package_json.main(NodeModuleKind::Cjs) { + return Ok(d.join(main).clean()); + } + } + + return Ok(d.join("index.js").clean()); + } + return file_extension_probe::(d, &referrer_path); + } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { + return Ok(module_dir.join(main).clean()); + } else { + return Ok(module_dir.join("index.js").clean()); + } + } + Err(not_found(specifier, &referrer_path)) + } +} + +fn esm_code_from_top_level_decls( + file_text: &str, + top_level_decls: &HashSet, +) -> String { + let mut globals = Vec::with_capacity(NODE_GLOBALS.len()); + let has_global_this = top_level_decls.contains("globalThis"); + for global in NODE_GLOBALS.iter() { + if !top_level_decls.contains(&global.to_string()) { + globals.push(*global); + } + } + + let mut result = String::new(); + let global_this_expr = NODE_GLOBAL_THIS_NAME.as_str(); + let global_this_expr = if has_global_this { + global_this_expr + } else { + write!(result, "var globalThis = {global_this_expr};").unwrap(); + "globalThis" + }; + for global in globals { + write!(result, "var {global} = {global_this_expr}.{global};").unwrap(); + } + + // strip the shebang + let file_text = if file_text.starts_with("#!/") { + let start_index = file_text.find('\n').unwrap_or(file_text.len()); + &file_text[start_index..] 
+ } else { + file_text + }; + result.push_str(file_text); + + result +} + +static RESERVED_WORDS: Lazy> = Lazy::new(|| { + HashSet::from([ + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "export", + "extends", + "false", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "null", + "return", + "super", + "switch", + "this", + "throw", + "true", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + "let", + "enum", + "implements", + "interface", + "package", + "private", + "protected", + "public", + "static", + ]) +}); + +fn add_export( + source: &mut Vec, + name: &str, + initializer: &str, + temp_var_count: &mut usize, +) { + fn is_valid_var_decl(name: &str) -> bool { + // it's ok to be super strict here + name + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$') + } + + // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object, + // but it might not be necessary here since our analysis is more detailed? 
+ if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) { + *temp_var_count += 1; + // we can't create an identifier with a reserved word or invalid identifier name, + // so assign it to a temporary variable that won't have a conflict, then re-export + // it as a string + source.push(format!( + "const __deno_export_{temp_var_count}__ = {initializer};" + )); + source.push(format!( + "export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};" + )); + } else { + source.push(format!("export const {name} = {initializer};")); + } +} + +fn parse_specifier(specifier: &str) -> Option<(String, String)> { + let mut separator_index = specifier.find('/'); + let mut valid_package_name = true; + // let mut is_scoped = false; + if specifier.is_empty() { + valid_package_name = false; + } else if specifier.starts_with('@') { + // is_scoped = true; + if let Some(index) = separator_index { + separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1); + } else { + valid_package_name = false; + } + } + + let package_name = if let Some(index) = separator_index { + specifier[0..index].to_string() + } else { + specifier.to_string() + }; + + // Package name cannot have leading . and cannot have percent-encoding or separators. 
+ for ch in package_name.chars() { + if ch == '%' || ch == '\\' { + valid_package_name = false; + break; + } + } + + if !valid_package_name { + return None; + } + + let package_subpath = if let Some(index) = separator_index { + format!(".{}", specifier.chars().skip(index).collect::()) + } else { + ".".to_string() + }; + + Some((package_name, package_subpath)) +} + +fn file_extension_probe( + p: PathBuf, + referrer: &Path, +) -> Result { + let p = p.clean(); + if Fs::exists(&p) { + let file_name = p.file_name().unwrap(); + let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if Fs::is_file(&p_js) { + return Ok(p_js); + } else if Fs::is_dir(&p) { + return Ok(p.join("index.js")); + } else { + return Ok(p); + } + } else if let Some(file_name) = p.file_name() { + let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if Fs::is_file(&p_js) { + return Ok(p_js); + } + } + Err(not_found(&p.to_string_lossy(), referrer)) +} + +fn not_found(path: &str, referrer: &Path) -> AnyError { + let msg = format!( + "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", + path, + referrer.to_string_lossy() + ); + std::io::Error::new(std::io::ErrorKind::NotFound, msg).into() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_esm_code_with_node_globals() { + let r = esm_code_from_top_level_decls( + "export const x = 1;", + &HashSet::from(["x".to_string()]), + ); + assert!(r.contains(&format!( + "var globalThis = {};", + NODE_GLOBAL_THIS_NAME.as_str() + ))); + assert!(r.contains("var process = globalThis.process;")); + assert!(r.contains("export const x = 1;")); + } + + #[test] + fn test_esm_code_with_node_globals_with_shebang() { + let r = esm_code_from_top_level_decls( + "#!/usr/bin/env node\nexport const x = 1;", + &HashSet::from(["x".to_string()]), + ); + assert_eq!( + r, + format!( + concat!( + "var globalThis = {}", + ";var Buffer = globalThis.Buffer;", + "var clearImmediate = 
globalThis.clearImmediate;var clearInterval = globalThis.clearInterval;", + "var clearTimeout = globalThis.clearTimeout;var console = globalThis.console;", + "var global = globalThis.global;var process = globalThis.process;", + "var setImmediate = globalThis.setImmediate;var setInterval = globalThis.setInterval;", + "var setTimeout = globalThis.setTimeout;\n", + "export const x = 1;" + ), + NODE_GLOBAL_THIS_NAME.as_str(), + ) + ); + } + + #[test] + fn test_add_export() { + let mut temp_var_count = 0; + let mut source = vec![]; + + let exports = vec!["static", "server", "app", "dashed-export"]; + for export in exports { + add_export(&mut source, export, "init", &mut temp_var_count); + } + assert_eq!( + source, + vec![ + "const __deno_export_1__ = init;".to_string(), + "export { __deno_export_1__ as \"static\" };".to_string(), + "export const server = init;".to_string(), + "export const app = init;".to_string(), + "const __deno_export_2__ = init;".to_string(), + "export { __deno_export_2__ as \"dashed-export\" };".to_string(), + ] + ) + } + + #[test] + fn test_parse_specifier() { + assert_eq!( + parse_specifier("@some-package/core/actions"), + Some(("@some-package/core".to_string(), "./actions".to_string())) + ); + } +} diff --git a/ext/node/crypto/x509.rs b/ext/node/crypto/x509.rs index 776103e1e7..402c58b720 100644 --- a/ext/node/crypto/x509.rs +++ b/ext/node/crypto/x509.rs @@ -228,6 +228,8 @@ fn x509name_to_string( name: &X509Name, oid_registry: &oid_registry::OidRegistry, ) -> Result { + // Lifted from https://github.com/rusticata/x509-parser/blob/4d618c2ed6b1fc102df16797545895f7c67ee0fe/src/x509.rs#L543-L566 + // since it's a private function (Copyright 2017 Pierre Chifflier) name.iter_rdn().fold(Ok(String::new()), |acc, rdn| { acc.and_then(|mut _vec| { rdn @@ -244,13 +246,13 @@ fn x509name_to_string( let rdn = format!("{}={}", abbrev, val_str); match _vec2.len() { 0 => Ok(rdn), - _ => Ok(_vec2 + " + " + &rdn), + _ => Ok(_vec2 + " + " + rdn.as_str()), } }) }) 
.map(|v| match _vec.len() { 0 => v, - _ => _vec + "\n" + &v, + _ => _vec + "\n" + v.as_str(), }) }) }) diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 65db6e45f4..a521e161c9 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -12,6 +12,7 @@ use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +pub mod analyze; mod crypto; pub mod errors; mod idna; From cf9fb18494d67526eb910cad68f8b364432fe584 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 21 Apr 2023 17:16:25 -0400 Subject: [PATCH 011/320] feat: upgrade deno_ast to 0.26 (#18795) Closes #18792 --- Cargo.lock | 255 ++++++++---------- Cargo.toml | 2 +- cli/Cargo.toml | 12 +- .../bundle/decorators/ts_decorators.out | 12 +- 4 files changed, 122 insertions(+), 159 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cb5bc2f9d7..d18dbc3638 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -150,11 +150,10 @@ dependencies = [ [[package]] name = "ast_node" -version = "0.8.6" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf94863c5fdfee166d0907c44e5fee970123b2b7307046d35d1e671aa93afbba" +checksum = "52f7fd7740c5752c16281a1c1f9442b1e69ba41738acde85dc604aaf3ce41890" dependencies = [ - "darling", "pmutil", "proc-macro2 1.0.56", "quote 1.0.26", @@ -323,6 +322,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3" + [[package]] name = "block-buffer" version = "0.9.0" @@ -451,7 +456,7 @@ version = "4.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce38afc168d8665cfc75c7b1dd9672e50716a137f433f070991619744a67342a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "clap_lex", "is-terminal", "strsim", @@ -655,41 +660,6 @@ 
dependencies = [ "zeroize", ] -[[package]] -name = "darling" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2 1.0.56", - "quote 1.0.26", - "strsim", - "syn 1.0.109", -] - -[[package]] -name = "darling_macro" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" -dependencies = [ - "darling_core", - "quote 1.0.26", - "syn 1.0.109", -] - [[package]] name = "dashmap" version = "5.4.0" @@ -805,13 +775,13 @@ dependencies = [ [[package]] name = "deno_ast" -version = "0.25.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b08341e0ed5b816e24b6582054b37707c8686de5598fa3004dc555131c993308" +checksum = "84b4db18773938f4613617d384b6579983c46fbe9962da7390a9fc7525ccbe9c" dependencies = [ "anyhow", "base64 0.13.1", - "data-url", + "deno_media_type", "dprint-swc-ext", "serde", "swc_atoms", @@ -936,9 +906,9 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.61.0" +version = "0.62.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1ba6a3137da0ed19838c09c6fb9c7a07af642786b298fc29e088cc5643e729" +checksum = "0a6e4c826679e4b0dd4f00b23e6c45343ce14903c3df2c210d094ee969312b8a" dependencies = [ "cfg-if", "deno_ast", @@ -954,9 +924,9 @@ dependencies = [ [[package]] name = "deno_emit" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c01676751a0ee50ebad80734735f9a28c6eabb164050034e10956b72af563941" +checksum = "e00ae58a811d155fc8c256ce54f35c752ee8c7dc777f0675971735d2783bd5e6" dependencies = [ "anyhow", "base64 0.13.1", @@ -1019,9 +989,9 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.47.1" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e81896f3abfe0c6410518cc0285155e6faa2aa87ca8da32fbf1670ef1254ea2" +checksum = "57683392402015acc8f20cc3623035f6b2a2c49f1728eef93536c712adafb2c2" dependencies = [ "anyhow", "data-url", @@ -1094,9 +1064,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.43.0" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6afdaeeec629609b4a95b29db5e151a437e133524ad62d4bb4358cf4f70f0465" +checksum = "8314e893e86e7f66cf06926d684a5d8708d737a28056472c9d7d78ef1c00691b" dependencies = [ "anyhow", "deno_ast", @@ -1121,6 +1091,17 @@ dependencies = [ "thiserror", ] +[[package]] +name = "deno_media_type" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63772a60d740a41d97fbffb4788fc3779e6df47289e01892c12be38f4a5beded" +dependencies = [ + "data-url", + "serde", + "url", +] + [[package]] name = "deno_napi" version = "0.29.0" @@ -1552,9 +1533,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.84.0" +version = "0.84.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f763f97007edaf6dd50f59526f29ee8aa8fa056f50644601bde04e0f1ac2e208" +checksum = "8979688409764dd95b356c0d278023cad45fbb24cf788eab8c972ae069a7a3f8" dependencies = [ "anyhow", "deno_ast", @@ -1565,9 +1546,9 @@ dependencies = [ [[package]] name = "dprint-swc-ext" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b6061551bcf644454469e6506c32bb23b765df93d608bf7a8e2494f82fcb3" +checksum = 
"3c3359a644cca781aece7d7c16bfa80fb35ac83da4e1014a28600debd1ef2a7e" dependencies = [ "bumpalo", "num-bigint", @@ -1607,7 +1588,7 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "add9a102807b524ec050363f09e06f1504214b0e1c7797f64261c891022dce8b" dependencies = [ - "bitflags", + "bitflags 1.3.2", "byteorder", "lazy_static", "proc-macro-error", @@ -1703,18 +1684,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "enum_kind" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b940da354ae81ef0926c5eaa428207b8f4f091d3956c891dfbd124162bed99" -dependencies = [ - "pmutil", - "proc-macro2 1.0.56", - "swc_macros_common", - "syn 1.0.109", -] - [[package]] name = "env_logger" version = "0.9.0" @@ -1770,9 +1739,9 @@ dependencies = [ [[package]] name = "eszip" -version = "0.40.0" +version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a0a0addd73b5077a769e23a914a68ec8862c310b6127e8383505f676684f65c" +checksum = "a808622e30489ade8c36b6e706bc819c4c75420b5aca4d1a6b996bea0d995aef" dependencies = [ "anyhow", "base64 0.21.0", @@ -1922,9 +1891,9 @@ dependencies = [ [[package]] name = "from_variant" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0981e470d2ab9f643df3921d54f1952ea100c39fdb6a3fdc820e20d2291df6c" +checksum = "1d449976075322384507443937df2f1d5577afbf4282f12a5a66ef29fa3e6307" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -2317,12 +2286,6 @@ dependencies = [ "tokio-rustls", ] -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - [[package]] name = "idna" version = "0.2.3" @@ -2395,7 +2358,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" dependencies = [ - "bitflags", + "bitflags 1.3.2", "inotify-sys", "libc", ] @@ -2559,7 +2522,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587" dependencies = [ - "bitflags", + "bitflags 1.3.2", "libc", ] @@ -2779,7 +2742,7 @@ version = "0.93.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51" dependencies = [ - "bitflags", + "bitflags 1.3.2", "serde", "serde_json", "serde_repr", @@ -2942,7 +2905,7 @@ version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cfg-if", "libc", "memoffset", @@ -2964,7 +2927,7 @@ version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "crossbeam-channel", "filetime", "fsevent-sys", @@ -3515,7 +3478,7 @@ version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" dependencies = [ - "bitflags", + "bitflags 1.3.2", "memchr", "unicase", ] @@ -3605,7 +3568,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -3756,7 +3719,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01e213bc3ecb39ac32e81e51ebe31fd888a940515173e3a18a35f8c6e896422a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "fallible-iterator", "fallible-streaming-iterator", 
"hashlink", @@ -3803,7 +3766,7 @@ version = "0.36.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", @@ -3856,7 +3819,7 @@ version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d1cd5ae51d3f7bf65d7969d579d502168ef578f289452bd8ccc91de28fda20e" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cfg-if", "clipboard-win", "fd-lock", @@ -3970,7 +3933,7 @@ version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -4337,9 +4300,9 @@ dependencies = [ [[package]] name = "string_enum" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41491e23e7db79343236a6ced96325ff132eb09e29ac4c5b8132b9c55aaaae89" +checksum = "0090512bdfee4b56d82480d66c0fd8a6f53f0fe0f97e075e949b252acdd482e0" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -4362,9 +4325,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "swc_atoms" -version = "0.4.39" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ebef84c2948cd0d1ba25acbf1b4bd9d80ab6f057efdbe35d8449b8d54699401" +checksum = "593c2f3e4cea60ddc4179ed731cabebe7eacec209d9e76a3bbcff4b2b020e3f5" dependencies = [ "once_cell", "rustc-hash", @@ -4376,9 +4339,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.208.4" +version = "0.213.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5adaebcfcb3ebc1b4d6418838250bb12f257bab9277fa2b2c61bb3324152c78f" +checksum = "6153a93eeb264274dfdf6aff3d73fdd098a5b9ef85f85241bdbd8e4149afdcb7" 
dependencies = [ "ahash", "anyhow", @@ -4407,9 +4370,9 @@ dependencies = [ [[package]] name = "swc_common" -version = "0.29.37" +version = "0.31.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5005cd73617e18592faa31298225b26f1c407b84a681d67efb735c3d3458e101" +checksum = "2b557014d62318e08070c2a3d5eb0278ff73749dd69db53c39a4de4bcd301d6a" dependencies = [ "ahash", "ast_node", @@ -4435,9 +4398,9 @@ dependencies = [ [[package]] name = "swc_config" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4de36224eb9498fccd4e68971f0b83326ccf8592c2d424f257f3a1c76b2b211" +checksum = "89c8fc2c12bb1634c7c32fc3c9b6b963ad8f034cc62c4ecddcf215dc4f6f959d" dependencies = [ "indexmap", "serde", @@ -4447,9 +4410,9 @@ dependencies = [ [[package]] name = "swc_config_macro" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb64bc03d90fd5c90d6ab917bb2b1d7fbd31957df39e31ea24a3f554b4372251" +checksum = "7dadb9998d4f5fc36ef558ed5a092579441579ee8c6fcce84a5228cca9df4004" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -4460,11 +4423,11 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "0.100.1" +version = "0.103.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dbfdbe05dde274473a6030dcf5e52e579516aea761d25d7a8d128f2ab597f09" +checksum = "5206233430a6763e2759da76cfc596a64250793f70cd94cace1f82fdcc4d702c" dependencies = [ - "bitflags", + "bitflags 2.1.0", "is-macro", "num-bigint", "scoped-tls", @@ -4477,9 +4440,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "0.135.2" +version = "0.138.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78d196e6979af0cbb91084361ca006db292a6374f75ec04cbb55306051cc4f50" +checksum = "cf45c899625d5132f2993a464a79f2ec7c79854b74fd3c55d1408b76d7d7750c" dependencies = [ "memchr", "num-bigint", @@ -4496,9 +4459,9 
@@ dependencies = [ [[package]] name = "swc_ecma_codegen_macros" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0159c99f81f52e48fe692ef7af1b0990b45d3006b14c6629be0b1ffee1b23aea" +checksum = "bf4ee0caee1018808d94ecd09490cb7affd3d504b19aa11c49238f5fc4b54901" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -4509,9 +4472,9 @@ dependencies = [ [[package]] name = "swc_ecma_dep_graph" -version = "0.102.2" +version = "0.105.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "188196862dfb9bbf12f5fcf1f0397c0b70852144f666d406f09951ddcf0a73e0" +checksum = "92813e2f77cdf4ad870f0474eee6574f4aba10504dd3730e694d03684a7a68ab" dependencies = [ "swc_atoms", "swc_common", @@ -4521,9 +4484,9 @@ dependencies = [ [[package]] name = "swc_ecma_loader" -version = "0.41.39" +version = "0.43.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681c1fbb762c82700a5bd23dc39bad892a287ea9fb2121cf56e77f1ddc89afeb" +checksum = "f1d985c6e7111fef3c0103b0414db0d792cb04b492601c94ccae2d494ffdf764" dependencies = [ "ahash", "anyhow", @@ -4535,12 +4498,11 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "0.130.2" +version = "0.133.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "042435aaeb71c4416cde440323ac9fa2c24121c2ec150f0cb79999c2e6ceffaa" +checksum = "8ce724a8fdc90548d882dec3b0288c0698059ce12a59bbfdeea0384f3d52f009" dependencies = [ "either", - "enum_kind", "lexical", "num-bigint", "serde", @@ -4556,12 +4518,13 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_base" -version = "0.122.3" +version = "0.126.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4141092b17cd85eefc224b035b717e03c910b9fd58e4e637ffd05236d7e13b" +checksum = "3c4236f8b9bea9d3d43cacab34b6e3c925c3f12585382b8f661cb994b987b688" dependencies = [ "better_scoped_tls", - "bitflags", + "bitflags 2.1.0", + "indexmap", 
"once_cell", "phf", "rustc-hash", @@ -4578,9 +4541,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_classes" -version = "0.111.3" +version = "0.115.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5022c592f0ae17f4dc42031e1c4c60b7e6d2d8d1c2428b986759a92ea853801" +checksum = "bd5b13763feba98586887a92801603c413897805c70ed82e49e4acc1f90683c2" dependencies = [ "swc_atoms", "swc_common", @@ -4592,9 +4555,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_macros" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebf907935ec5492256b523ae7935a824d9fdc0368dcadc41375bad0dca91cd8b" +checksum = "984d5ac69b681fc5438f9abf82b0fda34fe04e119bc75f8213b7e01128c7c9a2" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -4605,9 +4568,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_optimization" -version = "0.181.4" +version = "0.186.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "584b8d5b1ea8d174453eeff6abb66ed2e58cbd67b6e83a4d4b8154b463ef4dd3" +checksum = "456966f04224d2125551e0e35c164abe45183cbdd5238753294343814be102d3" dependencies = [ "ahash", "dashmap", @@ -4630,11 +4593,12 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_proposal" -version = "0.156.4" +version = "0.160.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4015c3ab090f27eee0834d45bdcf9666dc6329ed06845d1882cdfe6f4826fca" +checksum = "d21de731e3ff1ea451ac8c377a7130ebf6dbf6ffd18e744c15f86e685e0abd9a" dependencies = [ "either", + "rustc-hash", "serde", "smallvec", "swc_atoms", @@ -4649,16 +4613,15 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_react" -version = "0.167.4" +version = "0.172.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db1c7801b1d7741ab335441dd301ddcc4183fb250d5e6efaab33b03def268c06" +checksum = 
"a0df18263e6c0804a1a08abd29e87af763dce1bec4b500497a0b62c22df07b2d" dependencies = [ "ahash", "base64 0.13.1", "dashmap", "indexmap", "once_cell", - "regex", "serde", "sha-1 0.10.0", "string_enum", @@ -4675,9 +4638,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_typescript" -version = "0.171.4" +version = "0.176.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "142e8fb5ebe870bc51b3a95c0214af9112d3475b7cd5be4f13b87f3be664841a" +checksum = "d1a3f356bc2b902c13fc1e39bb66c10f350c46bfe93bae5c05402863d94bd307" dependencies = [ "serde", "swc_atoms", @@ -4691,9 +4654,9 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "0.113.3" +version = "0.116.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c44885603c09926118708f4352e04242c2482bc16eb51ad7beb8ad4cf5f7bb6" +checksum = "b462a1b6fc788ee956479adcbb05c282cb142a66a3b016b571fff0538a381196" dependencies = [ "indexmap", "num_cpus", @@ -4709,9 +4672,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "0.86.1" +version = "0.89.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147cf9137da6fe2704a5defd29a1cde849961978f8c92911e6790d50df475fef" +checksum = "ecb23a4a1d77997f54e9b3a4e68d1441e5e8a25ad1a476bbb3b5a620d6562a86" dependencies = [ "num-bigint", "swc_atoms", @@ -4735,21 +4698,21 @@ dependencies = [ [[package]] name = "swc_fast_graph" -version = "0.17.38" +version = "0.19.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a720ad8028d6c6e992039c862ed7318d143dee3994929793f59067fd69600b" +checksum = "992a92e087f7b2dc9aa626a6bee26530abbffba3572adf3894ccb55d2480f596" dependencies = [ - "ahash", "indexmap", "petgraph", + "rustc-hash", "swc_common", ] [[package]] name = "swc_graph_analyzer" -version = "0.18.41" +version = "0.20.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b25ac475500b0776f1bb82da02eff867819b3c653130023ea957cbd1e91befa8" +checksum = "f9e02ee852ffd7eb1ee42c081b615c2fb40a2876c4631637486207f493d806c6" dependencies = [ "ahash", "auto_impl", @@ -4760,9 +4723,9 @@ dependencies = [ [[package]] name = "swc_macros_common" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4be988307882648d9bc7c71a6a73322b7520ef0211e920489a98f8391d8caa2" +checksum = "3e582c3e3c2269238524923781df5be49e011dbe29cf7683a2215d600a562ea6" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -4772,9 +4735,9 @@ dependencies = [ [[package]] name = "swc_visit" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "470a1963cf182fdcbbac46e3a7fd2caf7329da0e568d3668202da9501c880e16" +checksum = "d1d5999f23421c8e21a0f2bc53a0b9e8244f3b421de89471561af2fbe40b9cca" dependencies = [ "either", "swc_visit_macros", @@ -4782,9 +4745,9 @@ dependencies = [ [[package]] name = "swc_visit_macros" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6098b717cfd4c85f5cddec734af191dbce461c39975ed567c32ac6d0c6d61a6d" +checksum = "ebeed7eb0f545f48ad30f5aab314e5208b735bcea1d1464f26e20f06db904989" dependencies = [ "Inflector", "pmutil", @@ -5566,7 +5529,7 @@ version = "0.68.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81c69410b7435f1b74e82e243ba906d71e8b9bb350828291418b9311dbd77222" dependencies = [ - "bitflags", + "bitflags 1.3.2", "fslock", "lazy_static", "which", diff --git a/Cargo.toml b/Cargo.toml index cc693c5e61..de656e81eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] v8 = { version = "0.68.0", default-features = false } -deno_ast = { version = "0.25.0", features = ["transpiling"] } +deno_ast = { version = "0.26.0", features = ["transpiling"] } deno_core = { version = 
"0.181.0", path = "./core" } deno_ops = { version = "0.59.0", path = "./ops" } diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a792f7a3a8..64ce5fce7c 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -42,10 +42,10 @@ winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } -deno_doc = "0.61.0" -deno_emit = "0.19.0" -deno_graph = "=0.47.1" -deno_lint = { version = "0.43.0", features = ["docs"] } +deno_doc = "0.62.0" +deno_emit = "0.20.0" +deno_graph = "=0.48.0" +deno_lint = { version = "0.44.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm = "0.3.0" deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] } @@ -67,10 +67,10 @@ data-url.workspace = true dissimilar = "=1.0.4" dprint-plugin-json = "=0.17.0" dprint-plugin-markdown = "=0.15.2" -dprint-plugin-typescript = "=0.84.0" +dprint-plugin-typescript = "=0.84.2" encoding_rs.workspace = true env_logger = "=0.9.0" -eszip = "=0.40.0" +eszip = "=0.41.0" fancy-regex = "=0.10.0" flate2.workspace = true fs3.workspace = true diff --git a/cli/tests/testdata/bundle/decorators/ts_decorators.out b/cli/tests/testdata/bundle/decorators/ts_decorators.out index 2f503fdd06..e988aadd38 100644 --- a/cli/tests/testdata/bundle/decorators/ts_decorators.out +++ b/cli/tests/testdata/bundle/decorators/ts_decorators.out @@ -3,12 +3,12 @@ // deno-lint-ignore-file // This code was bundled using `deno bundle` and it's not recommended to edit it manually -var __decorate = this && this.__decorate || function(decorators, target, key, desc) { +function _ts_decorate(decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; -}; +} function a() { console.log("a(): evaluated"); return (_target, _propertyKey, _descriptor)=>{ @@ -20,15 +20,15 @@ class B { console.log("method"); } } -__decorate([ +_ts_decorate([ a() ], B.prototype, "method", null); -var __decorate1 = this && this.__decorate || function(decorators, target, key, desc) { +function _ts_decorate1(decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; -}; +} function Decorator() { return function(target, propertyKey, descriptor) { const originalFn = descriptor.value; @@ -41,7 +41,7 @@ function Decorator() { class SomeClass { async test() {} } -__decorate1([ +_ts_decorate1([ Decorator() ], SomeClass.prototype, "test", null); new SomeClass().test(); From 779d379c68d1489cc01f6a2bfbcf677e08ca6d40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 22 Apr 2023 02:36:52 +0200 Subject: [PATCH 012/320] chore: upgrade rusty_v8 to 0.69.0 (#18796) --- .github/workflows/ci.generate.ts | 4 ++-- .github/workflows/ci.yml | 6 +++--- Cargo.lock | 6 +++--- Cargo.toml | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 241b38ef99..b59659e624 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + "21-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "20-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "21-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c7144164b6..501feea016 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '20-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '21-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') 
}}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '21-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -578,7 +578,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '21-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index d18dbc3638..3edd36c28c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5525,13 +5525,13 @@ dependencies = [ [[package]] name = "v8" -version = "0.68.0" +version = "0.69.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81c69410b7435f1b74e82e243ba906d71e8b9bb350828291418b9311dbd77222" +checksum = "687e14c2535fe5749098994fd67773962050abe64bcc6a8c92dbf7221b746f49" dependencies = [ "bitflags 1.3.2", "fslock", - "lazy_static", + "once_cell", "which", ] diff --git a/Cargo.toml b/Cargo.toml index de656e81eb..45f355cdfe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.68.0", default-features = false } +v8 = { version = "0.69.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } deno_core = { version = "0.181.0", path = "./core" } From a615eb3b56545960ec9684991442dd34a8b2abfc Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 21 Apr 2023 
21:02:46 -0400 Subject: [PATCH 013/320] refactor(node): move most of cli/node to ext/node (#18797) This is just a straight refactor and I didn't do any cleanup in ext/node. After this PR we can start to clean it up and make things private that don't need to be public anymore. --- Cargo.lock | 3 + Cargo.toml | 3 + cli/Cargo.toml | 4 +- cli/graph_util.rs | 9 +- cli/lsp/diagnostics.rs | 8 +- cli/lsp/documents.rs | 10 +- cli/lsp/language_server.rs | 22 ++- cli/module_loader.rs | 26 ++-- cli/{node/analyze.rs => node.rs} | 28 ++++ cli/npm/mod.rs | 3 +- cli/npm/resolution.rs | 2 +- cli/npm/resolvers/mod.rs | 91 +++++------- cli/ops/mod.rs | 8 +- cli/proc_state.rs | 17 +-- cli/tools/check.rs | 6 +- cli/tools/info.rs | 10 +- cli/tools/task.rs | 8 +- cli/tsc/mod.rs | 16 +- cli/worker.rs | 23 +-- ext/node/Cargo.toml | 3 + ext/node/analyze.rs | 16 +- ext/node/lib.rs | 107 +++++++++++++- ext/node/ops.rs | 26 ++-- ext/node/package_json.rs | 4 +- ext/node/polyfill.rs | 20 ++- ext/node/resolution.rs | 20 +-- cli/node/mod.rs => ext/node/resolver.rs | 185 +++++++++--------------- runtime/web_worker.rs | 4 +- runtime/worker.rs | 4 +- 29 files changed, 387 insertions(+), 299 deletions(-) rename cli/{node/analyze.rs => node.rs} (82%) rename cli/node/mod.rs => ext/node/resolver.rs (77%) diff --git a/Cargo.lock b/Cargo.lock index 3edd36c28c..ceeb2cf2a0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1132,6 +1132,9 @@ dependencies = [ "cbc", "data-encoding", "deno_core", + "deno_media_type", + "deno_npm", + "deno_semver", "digest 0.10.6", "dsa", "ecb", diff --git a/Cargo.toml b/Cargo.toml index 45f355cdfe..6b49de2311 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,6 +52,9 @@ napi_sym = { version = "0.29.0", path = "./cli/napi/sym" } deno_bench_util = { version = "0.93.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.13.0" +deno_media_type = { version = "0.1.0", features = ["module_specifier"] } +deno_npm = "0.3.0" +deno_semver = "0.2.1" # exts 
deno_broadcast_channel = { version = "0.93.0", path = "./ext/broadcast_channel" } diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 64ce5fce7c..96fe458ae2 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -47,9 +47,9 @@ deno_emit = "0.20.0" deno_graph = "=0.48.0" deno_lint = { version = "0.44.0", features = ["docs"] } deno_lockfile.workspace = true -deno_npm = "0.3.0" +deno_npm.workspace = true deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] } -deno_semver = "0.2.1" +deno_semver.workspace = true deno_task_shell = "0.11.0" napi_sym.workspace = true diff --git a/cli/graph_util.rs b/cli/graph_util.rs index dacd3be174..d5bc6ac0d5 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -9,7 +9,7 @@ use crate::cache::ParsedSourceCache; use crate::colors; use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use crate::resolver::CliGraphResolver; use crate::tools::check; use crate::tools::check::TypeChecker; @@ -29,6 +29,7 @@ use deno_graph::ModuleGraph; use deno_graph::ModuleGraphError; use deno_graph::ResolutionError; use deno_graph::SpecifierError; +use deno_runtime::deno_node; use deno_runtime::permissions::PermissionsContainer; use import_map::ImportMapError; use std::collections::HashMap; @@ -165,7 +166,7 @@ pub fn graph_lock_or_exit(graph: &ModuleGraph, lockfile: &mut Lockfile) { pub struct ModuleGraphBuilder { options: Arc, resolver: Arc, - npm_resolver: Arc, + npm_resolver: Arc, parsed_source_cache: Arc, lockfile: Option>>, emit_cache: cache::EmitCache, @@ -178,7 +179,7 @@ impl ModuleGraphBuilder { pub fn new( options: Arc, resolver: Arc, - npm_resolver: Arc, + npm_resolver: Arc, parsed_source_cache: Arc, lockfile: Option>>, emit_cache: cache::EmitCache, @@ -377,7 +378,7 @@ pub fn get_resolution_error_bare_node_specifier( error: &ResolutionError, ) -> Option<&str> { 
get_resolution_error_bare_specifier(error).filter(|specifier| { - crate::node::resolve_builtin_node_module(specifier).is_ok() + deno_node::resolve_builtin_node_module(specifier).is_ok() }) } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 8c2126561b..965075a2d7 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -16,7 +16,6 @@ use super::tsc::TsServer; use crate::args::LintOptions; use crate::graph_util; use crate::graph_util::enhanced_resolution_error_message; -use crate::node; use crate::tools::lint::get_configured_rules; use deno_ast::MediaType; @@ -31,6 +30,7 @@ use deno_graph::Resolution; use deno_graph::ResolutionError; use deno_graph::SpecifierError; use deno_lint::rules::LintRule; +use deno_runtime::deno_node; use deno_runtime::tokio_util::create_basic_runtime; use deno_semver::npm::NpmPackageReqReference; use log::error; @@ -469,8 +469,8 @@ async fn generate_lint_diagnostics( } // ignore any npm package files - if let Some(npm_resolver) = &snapshot.maybe_npm_resolver { - if npm_resolver.in_npm_package(document.specifier()) { + if let Some(node_resolver) = &snapshot.maybe_node_resolver { + if node_resolver.in_npm_package(document.specifier()) { continue; } } @@ -926,7 +926,7 @@ fn diagnose_resolution( } } else if let Some(module_name) = specifier.as_str().strip_prefix("node:") { - if node::resolve_builtin_node_module(module_name).is_err() { + if deno_node::resolve_builtin_node_module(module_name).is_err() { diagnostics.push( DenoDiagnostic::InvalidNodeSpecifier(specifier.clone()) .to_lsp_diagnostic(&range), diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 7b206406c1..fd40bb95f2 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -18,7 +18,6 @@ use crate::file_fetcher::map_content_type; use crate::file_fetcher::SUPPORTED_SCHEMES; use crate::lsp::logging::lsp_warn; use crate::node::CliNodeResolver; -use crate::node::NodeResolution; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; 
use crate::npm::PackageJsonDepsInstaller; @@ -37,8 +36,11 @@ use deno_core::url; use deno_core::ModuleSpecifier; use deno_graph::GraphImport; use deno_graph::Resolution; +use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::PackageJson; +use deno_runtime::deno_node::RealFs; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReq; use deno_semver::npm::NpmPackageReqReference; @@ -1069,7 +1071,7 @@ impl Documents { // we're in an npm package, so use node resolution results.push(Some(NodeResolution::into_specifier_and_media_type( node_resolver - .resolve( + .resolve::( &specifier, referrer, NodeResolutionMode::Types, @@ -1082,7 +1084,7 @@ impl Documents { } } if let Some(module_name) = specifier.strip_prefix("node:") { - if crate::node::resolve_builtin_node_module(module_name).is_ok() { + if deno_node::resolve_builtin_node_module(module_name).is_ok() { // return itself for node: specifiers because during type checking // we resolve to the ambient modules in the @types/node package // rather than deno_std/node @@ -1457,7 +1459,7 @@ fn node_resolve_npm_req_ref( maybe_node_resolver.map(|node_resolver| { NodeResolution::into_specifier_and_media_type( node_resolver - .resolve_npm_req_reference( + .resolve_npm_req_reference::( &npm_req_ref, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 239ff8a6ef..f1b9cb4347 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -9,6 +9,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::ModuleSpecifier; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_web::BlobStore; use import_map::ImportMap; @@ -81,8 +82,8 @@ use crate::lsp::urls::LspUrlKind; use 
crate::node::CliNodeResolver; use crate::npm::create_npm_fs_resolver; use crate::npm::CliNpmRegistryApi; +use crate::npm::CliNpmResolver; use crate::npm::NpmCache; -use crate::npm::NpmPackageResolver; use crate::npm::NpmResolution; use crate::proc_state::ProcState; use crate::tools::fmt::format_file; @@ -103,7 +104,7 @@ pub struct StateSnapshot { pub documents: Documents, pub maybe_import_map: Option>, pub maybe_node_resolver: Option>, - pub maybe_npm_resolver: Option>, + pub maybe_npm_resolver: Option>, } #[derive(Debug)] @@ -153,7 +154,7 @@ pub struct Inner { /// Npm resolution that is stored in memory. npm_resolution: Arc, /// Resolver for npm packages. - npm_resolver: Arc, + npm_resolver: Arc, /// A collection of measurements which instrument that performance of the LSP. performance: Arc, /// A memoized version of fixable diagnostic codes retrieved from TypeScript. @@ -424,7 +425,7 @@ fn create_lsp_structs( ) -> ( Arc, Arc, - Arc, + Arc, Arc, ) { let registry_url = CliNpmRegistryApi::default_url(); @@ -457,11 +458,7 @@ fn create_lsp_structs( ( api, npm_cache, - Arc::new(NpmPackageResolver::new( - resolution.clone(), - fs_resolver, - None, - )), + Arc::new(CliNpmResolver::new(resolution.clone(), fs_resolver, None)), resolution, ) } @@ -703,19 +700,18 @@ impl Inner { self.npm_resolution.snapshot(), None, )); - let npm_resolver = Arc::new(NpmPackageResolver::new( + let npm_resolver = Arc::new(CliNpmResolver::new( npm_resolution.clone(), create_npm_fs_resolver( self.npm_cache.clone(), &ProgressBar::new(ProgressBarStyle::TextOnly), self.npm_api.base_url().clone(), - npm_resolution.clone(), + npm_resolution, None, ), None, )); - let node_resolver = - Arc::new(CliNodeResolver::new(npm_resolution, npm_resolver.clone())); + let node_resolver = Arc::new(NodeResolver::new(npm_resolver.clone())); Arc::new(StateSnapshot { assets: self.assets.snapshot(), cache_metadata: self.cache_metadata.clone(), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 
07fad6ffc6..c4ef0ed7e9 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -11,10 +11,8 @@ use crate::graph_util::graph_valid_with_cli_options; use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::node; -use crate::node::CliCjsEsmCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeResolver; -use crate::node::NodeResolution; -use crate::npm::CliRequireNpmResolver; use crate::proc_state::CjsResolutionStore; use crate::proc_state::FileWatcherReporter; use crate::proc_state::ProcState; @@ -50,7 +48,8 @@ use deno_graph::JsonModule; use deno_graph::Module; use deno_graph::Resolution; use deno_lockfile::Lockfile; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::RealFs; use deno_runtime::permissions::PermissionsContainer; @@ -244,8 +243,7 @@ pub struct CliModuleLoader { emitter: Arc, graph_container: Arc, module_load_preparer: Arc, - node_code_translator: - Arc>, + node_code_translator: Arc, node_resolver: Arc, parsed_source_cache: Arc, resolver: Arc, @@ -430,7 +428,7 @@ impl CliModuleLoader { fn handle_node_resolve_result( &self, - result: Result, AnyError>, + result: Result, AnyError>, ) -> Result { let response = match result? 
{ Some(response) => response, @@ -440,7 +438,7 @@ impl CliModuleLoader { // remember that this was a common js resolution self.cjs_resolutions.insert(specifier.clone()); } else if let NodeResolution::BuiltIn(specifier) = &response { - return node::resolve_builtin_node_module(specifier); + return deno_node::resolve_builtin_node_module(specifier); } Ok(response.into_url()) } @@ -468,7 +466,7 @@ impl ModuleLoader for CliModuleLoader { if self.node_resolver.in_npm_package(referrer) { // we're in an npm package, so use node resolution return self - .handle_node_resolve_result(self.node_resolver.resolve( + .handle_node_resolve_result(self.node_resolver.resolve::( specifier, referrer, NodeResolutionMode::Execution, @@ -494,7 +492,7 @@ impl ModuleLoader for CliModuleLoader { return match graph.get(specifier) { Some(Module::Npm(module)) => self .handle_node_resolve_result( - self.node_resolver.resolve_npm_reference( + self.node_resolver.resolve_npm_reference::( &module.nv_reference, NodeResolutionMode::Execution, &mut permissions, @@ -504,7 +502,7 @@ impl ModuleLoader for CliModuleLoader { format!("Could not resolve '{}'.", module.nv_reference) }), Some(Module::Node(module)) => { - node::resolve_builtin_node_module(&module.module_name) + deno_node::resolve_builtin_node_module(&module.module_name) } Some(Module::Esm(module)) => Ok(module.specifier.clone()), Some(Module::Json(module)) => Ok(module.specifier.clone()), @@ -526,7 +524,7 @@ impl ModuleLoader for CliModuleLoader { // Built-in Node modules if let Some(module_name) = specifier.strip_prefix("node:") { - return node::resolve_builtin_node_module(module_name); + return deno_node::resolve_builtin_node_module(module_name); } // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL @@ -556,9 +554,9 @@ impl ModuleLoader for CliModuleLoader { { return self .handle_node_resolve_result( - self.node_resolver.resolve_npm_req_reference( + self.node_resolver.resolve_npm_req_reference::( &reference, - 
deno_runtime::deno_node::NodeResolutionMode::Execution, + NodeResolutionMode::Execution, &mut permissions, ), ) diff --git a/cli/node/analyze.rs b/cli/node.rs similarity index 82% rename from cli/node/analyze.rs rename to cli/node.rs index 27818639ef..3ec9500e88 100644 --- a/cli/node/analyze.rs +++ b/cli/node.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::collections::HashSet; +use std::sync::Arc; use deno_ast::swc::common::SyntaxContext; use deno_ast::view::Node; @@ -13,8 +14,35 @@ use deno_ast::SourceRanged; use deno_core::error::AnyError; use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use deno_runtime::deno_node::analyze::CjsEsmCodeAnalyzer; +use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node::NodeResolver; use crate::cache::NodeAnalysisCache; +use crate::npm::CliNpmResolver; +use crate::util::fs::canonicalize_path_maybe_not_exists; + +pub type CliNodeCodeTranslator = + NodeCodeTranslator>; +pub type CliNodeResolver = NodeResolver>; + +/// Resolves a specifier that is pointing into a node_modules folder. +/// +/// Note: This should be called whenever getting the specifier from +/// a Module::External(module) reference because that module might +/// not be fully resolved at the time deno_graph is analyzing it +/// because the node_modules folder might not exist at that time. 
+pub fn resolve_specifier_into_node_modules( + specifier: &ModuleSpecifier, +) -> ModuleSpecifier { + specifier + .to_file_path() + .ok() + // this path might not exist at the time the graph is being created + // because the node_modules folder might not yet exist + .and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) + .and_then(|path| ModuleSpecifier::from_file_path(path).ok()) + .unwrap_or_else(|| specifier.clone()) +} pub struct CliCjsEsmCodeAnalyzer { cache: NodeAnalysisCache, diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 8f6ac77bc6..488f8eae6a 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -13,6 +13,5 @@ pub use installer::PackageJsonDepsInstaller; pub use registry::CliNpmRegistryApi; pub use resolution::NpmResolution; pub use resolvers::create_npm_fs_resolver; -pub use resolvers::CliRequireNpmResolver; -pub use resolvers::NpmPackageResolver; +pub use resolvers::CliNpmResolver; pub use resolvers::NpmProcessState; diff --git a/cli/npm/resolution.rs b/cli/npm/resolution.rs index 26fc356ffb..1b191b2455 100644 --- a/cli/npm/resolution.rs +++ b/cli/npm/resolution.rs @@ -154,7 +154,7 @@ impl NpmResolution { Ok(()) } - pub fn pkg_req_ref_to_nv_ref( + pub fn resolve_nv_ref_from_pkg_req_ref( &self, req_ref: &NpmPackageReqReference, ) -> Result { diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index f693d3d23a..8b871beaf7 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -20,10 +20,12 @@ use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::NpmResolver; use deno_runtime::deno_node::PathClean; -use deno_runtime::deno_node::RequireNpmResolver; use deno_semver::npm::NpmPackageNv; +use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageReq; +use deno_semver::npm::NpmPackageReqReference; use global::GlobalNpmPackageResolver; use 
serde::Deserialize; use serde::Serialize; @@ -45,13 +47,13 @@ pub struct NpmProcessState { } /// Brings together the npm resolution with the file system. -pub struct NpmPackageResolver { +pub struct CliNpmResolver { fs_resolver: Arc, resolution: Arc, maybe_lockfile: Option>>, } -impl std::fmt::Debug for NpmPackageResolver { +impl std::fmt::Debug for CliNpmResolver { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("NpmPackageResolver") .field("fs_resolver", &"") @@ -61,7 +63,7 @@ impl std::fmt::Debug for NpmPackageResolver { } } -impl NpmPackageResolver { +impl CliNpmResolver { pub fn new( resolution: Arc, fs_resolver: Arc, @@ -85,15 +87,6 @@ impl NpmPackageResolver { self.resolution.resolve_pkg_id_from_pkg_req(req) } - /// Resolves an npm package folder path from a Deno module. - pub fn resolve_package_folder_from_deno_module( - &self, - pkg_nv: &NpmPackageNv, - ) -> Result { - let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(pkg_nv)?; - self.resolve_pkg_folder_from_deno_module_at_pkg_id(&pkg_id) - } - fn resolve_pkg_folder_from_deno_module_at_pkg_id( &self, pkg_id: &NpmPackageId, @@ -108,20 +101,6 @@ impl NpmPackageResolver { Ok(path) } - /// Resolves an npm package folder path from an npm package referrer. - pub fn resolve_package_folder_from_package( - &self, - name: &str, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result { - let path = self - .fs_resolver - .resolve_package_folder_from_package(name, referrer, mode)?; - log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); - Ok(path) - } - /// Resolve the root folder of the package the provided specifier is in. /// /// This will error when the provided specifier is not in an npm package. 
@@ -228,26 +207,20 @@ impl NpmPackageResolver { self.fs_resolver.cache_packages().await?; Ok(()) } - - pub fn as_require_npm_resolver(self: &Arc) -> CliRequireNpmResolver { - CliRequireNpmResolver(self.clone()) - } } -#[derive(Debug)] -pub struct CliRequireNpmResolver(Arc); - -impl RequireNpmResolver for CliRequireNpmResolver { +impl NpmResolver for CliNpmResolver { fn resolve_package_folder_from_package( &self, - specifier: &str, - referrer: &std::path::Path, + name: &str, + referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result { - let referrer = path_to_specifier(referrer)?; - self - .0 - .resolve_package_folder_from_package(specifier, &referrer, mode) + let path = self + .fs_resolver + .resolve_package_folder_from_package(name, referrer, mode)?; + log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); + Ok(path) } fn resolve_package_folder_from_path( @@ -255,18 +228,34 @@ impl RequireNpmResolver for CliRequireNpmResolver { path: &Path, ) -> Result { let specifier = path_to_specifier(path)?; - self.0.resolve_package_folder_from_specifier(&specifier) + self.resolve_package_folder_from_specifier(&specifier) } - fn in_npm_package(&self, path: &Path) -> bool { - let specifier = - match ModuleSpecifier::from_file_path(path.to_path_buf().clean()) { - Ok(p) => p, - Err(_) => return false, - }; + fn resolve_package_folder_from_deno_module( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result { + let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(pkg_nv)?; + self.resolve_pkg_folder_from_deno_module_at_pkg_id(&pkg_id) + } + + fn resolve_pkg_id_from_pkg_req( + &self, + req: &NpmPackageReq, + ) -> Result { + self.resolution.resolve_pkg_id_from_pkg_req(req) + } + + fn resolve_nv_ref_from_pkg_req_ref( + &self, + req_ref: &NpmPackageReqReference, + ) -> Result { + self.resolution.resolve_nv_ref_from_pkg_req_ref(req_ref) + } + + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { self - .0 - 
.resolve_package_folder_from_specifier(&specifier) + .resolve_package_folder_from_specifier(specifier) .is_ok() } @@ -275,7 +264,7 @@ impl RequireNpmResolver for CliRequireNpmResolver { permissions: &mut dyn NodePermissions, path: &Path, ) -> Result<(), AnyError> { - self.0.fs_resolver.ensure_read_permission(permissions, path) + self.fs_resolver.ensure_read_permission(permissions, path) } } diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs index 9adc944cee..d39f19270a 100644 --- a/cli/ops/mod.rs +++ b/cli/ops/mod.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use deno_core::error::AnyError; use deno_core::op; use deno_core::Extension; @@ -11,14 +11,14 @@ use deno_core::OpState; pub mod bench; pub mod testing; -pub fn cli_exts(npm_resolver: Arc) -> Vec { +pub fn cli_exts(npm_resolver: Arc) -> Vec { vec![deno_cli::init_ops(npm_resolver)] } deno_core::extension!(deno_cli, ops = [op_npm_process_state], options = { - npm_resolver: Arc, + npm_resolver: Arc, }, state = |state, options| { state.put(options.npm_resolver); @@ -30,6 +30,6 @@ deno_core::extension!(deno_cli, #[op] fn op_npm_process_state(state: &mut OpState) -> Result { - let npm_resolver = state.borrow_mut::>(); + let npm_resolver = state.borrow_mut::>(); Ok(npm_resolver.get_npm_process_state()) } diff --git a/cli/proc_state.rs b/cli/proc_state.rs index bfe45bc861..6b7e9b1f28 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -18,12 +18,12 @@ use crate::graph_util::ModuleGraphContainer; use crate::http_util::HttpClient; use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsEsmCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeResolver; use crate::npm::create_npm_fs_resolver; use crate::npm::CliNpmRegistryApi; -use crate::npm::CliRequireNpmResolver; +use crate::npm::CliNpmResolver; use crate::npm::NpmCache; -use crate::npm::NpmPackageResolver; use crate::npm::NpmResolution; use 
crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; @@ -39,6 +39,7 @@ use deno_core::SharedArrayBufferStore; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; @@ -77,12 +78,11 @@ pub struct Inner { maybe_file_watcher_reporter: Option, pub module_graph_builder: Arc, pub module_load_preparer: Arc, - pub node_code_translator: - Arc>, + pub node_code_translator: Arc, pub node_resolver: Arc, pub npm_api: Arc, pub npm_cache: Arc, - pub npm_resolver: Arc, + pub npm_resolver: Arc, pub npm_resolution: Arc, pub package_json_deps_installer: Arc, pub cjs_resolutions: Arc, @@ -252,7 +252,7 @@ impl ProcState { npm_resolution.clone(), cli_options.node_modules_dir_path(), ); - let npm_resolver = Arc::new(NpmPackageResolver::new( + let npm_resolver = Arc::new(CliNpmResolver::new( npm_resolution.clone(), npm_fs_resolver, lockfile.as_ref().cloned(), @@ -310,12 +310,9 @@ impl ProcState { let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, - npm_resolver.as_require_npm_resolver(), - )); - let node_resolver = Arc::new(CliNodeResolver::new( - npm_resolution.clone(), npm_resolver.clone(), )); + let node_resolver = Arc::new(NodeResolver::new(npm_resolver.clone())); let type_checker = Arc::new(TypeChecker::new( dir.clone(), caches.clone(), diff --git a/cli/tools/check.rs b/cli/tools/check.rs index c7f4042233..36bc25d6a8 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -22,7 +22,7 @@ use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; use crate::node::CliNodeResolver; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use crate::tsc; 
use crate::version; @@ -43,7 +43,7 @@ pub struct TypeChecker { caches: Arc, cli_options: Arc, node_resolver: Arc, - npm_resolver: Arc, + npm_resolver: Arc, } impl TypeChecker { @@ -52,7 +52,7 @@ impl TypeChecker { caches: Arc, cli_options: Arc, node_resolver: Arc, - npm_resolver: Arc, + npm_resolver: Arc, ) -> Self { Self { deno_dir, diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 69faa10fbc..a59f8a4c84 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -28,7 +28,7 @@ use crate::args::Flags; use crate::args::InfoFlags; use crate::display; use crate::graph_util::graph_lock_or_exit; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use crate::proc_state::ProcState; use crate::util::checksum; @@ -141,7 +141,7 @@ fn print_cache_info( fn add_npm_packages_to_json( json: &mut serde_json::Value, - npm_resolver: &NpmPackageResolver, + npm_resolver: &CliNpmResolver, ) { // ideally deno_graph could handle this, but for now we just modify the json here let snapshot = npm_resolver.snapshot(); @@ -318,7 +318,7 @@ struct NpmInfo { impl NpmInfo { pub fn build<'a>( graph: &'a ModuleGraph, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, npm_snapshot: &'a NpmResolutionSnapshot, ) -> Self { let mut info = NpmInfo::default(); @@ -344,7 +344,7 @@ impl NpmInfo { fn fill_package_info<'a>( &mut self, package: &NpmResolutionPackage, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, npm_snapshot: &'a NpmResolutionSnapshot, ) { self @@ -380,7 +380,7 @@ struct GraphDisplayContext<'a> { impl<'a> GraphDisplayContext<'a> { pub fn write( graph: &'a ModuleGraph, - npm_resolver: &'a NpmPackageResolver, + npm_resolver: &'a CliNpmResolver, writer: &mut TWrite, ) -> fmt::Result { let npm_snapshot = npm_resolver.snapshot(); diff --git a/cli/tools/task.rs b/cli/tools/task.rs index c64e2a77cd..898cdd8d90 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -5,7 +5,7 @@ use crate::args::Flags; use 
crate::args::TaskFlags; use crate::colors; use crate::node::CliNodeResolver; -use crate::npm::NpmPackageResolver; +use crate::npm::CliNpmResolver; use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path; use deno_core::anyhow::bail; @@ -13,6 +13,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::future::LocalBoxFuture; +use deno_runtime::deno_node::RealFs; use deno_semver::npm::NpmPackageNv; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; @@ -234,13 +235,14 @@ impl ShellCommand for NpmPackageBinCommand { } fn resolve_npm_commands( - npm_resolver: &NpmPackageResolver, + npm_resolver: &CliNpmResolver, node_resolver: &CliNodeResolver, ) -> Result>, AnyError> { let mut result = HashMap::new(); let snapshot = npm_resolver.snapshot(); for id in snapshot.top_level_packages() { - let bin_commands = node_resolver.resolve_binary_commands(&id.nv)?; + let bin_commands = + node_resolver.resolve_binary_commands::(&id.nv)?; for bin_command in bin_commands { result.insert( bin_command.to_string(), diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 57a4a1be83..0d956b661f 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -5,7 +5,6 @@ use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; use crate::node; use crate::node::CliNodeResolver; -use crate::node::NodeResolution; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; @@ -33,7 +32,10 @@ use deno_core::Snapshot; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; +use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::RealFs; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; @@ -537,7 +539,7 @@ fn op_resolve( }; for specifier in args.specifiers { if let Some(module_name) = 
specifier.strip_prefix("node:") { - if crate::node::resolve_builtin_node_module(module_name).is_ok() { + if deno_node::resolve_builtin_node_module(module_name).is_ok() { // return itself for node: specifiers because during type checking // we resolve to the ambient modules in the @types/node package // rather than deno_std/node @@ -635,7 +637,7 @@ fn resolve_graph_specifier_types( } Some(Module::Npm(module)) => { if let Some(node_resolver) = &state.maybe_node_resolver { - let maybe_resolution = node_resolver.resolve_npm_reference( + let maybe_resolution = node_resolver.resolve_npm_reference::( &module.nv_reference, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), @@ -653,7 +655,9 @@ fn resolve_graph_specifier_types( let specifier = node::resolve_specifier_into_node_modules(&module.specifier); NodeResolution::into_specifier_and_media_type( - node_resolver.url_to_node_resolution(specifier).ok(), + node_resolver + .url_to_node_resolution::(specifier) + .ok(), ) })) } @@ -674,7 +678,7 @@ fn resolve_non_graph_specifier_types( // we're in an npm package, so use node resolution Ok(Some(NodeResolution::into_specifier_and_media_type( node_resolver - .resolve( + .resolve::( specifier, referrer, NodeResolutionMode::Types, @@ -688,7 +692,7 @@ fn resolve_non_graph_specifier_types( // we don't need this special code here. 
// This could occur when resolving npm:@types/node when it is // injected and not part of the graph - let maybe_resolution = node_resolver.resolve_npm_req_reference( + let maybe_resolution = node_resolver.resolve_npm_req_reference::( &npm_ref, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), diff --git a/cli/worker.rs b/cli/worker.rs index 7ee8fc8021..c73e4edbed 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -13,6 +13,8 @@ use deno_core::Extension; use deno_core::ModuleId; use deno_runtime::colors; use deno_runtime::deno_node; +use deno_runtime::deno_node::NodeResolution; +use deno_runtime::deno_node::RealFs; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; @@ -27,7 +29,6 @@ use deno_semver::npm::NpmPackageReqReference; use crate::args::DenoSubcommand; use crate::errors; use crate::module_loader::CliModuleLoader; -use crate::node; use crate::ops; use crate::proc_state::ProcState; use crate::tools; @@ -258,16 +259,16 @@ pub async fn create_custom_worker( ps.npm_resolver .add_package_reqs(vec![package_ref.req.clone()]) .await?; - let node_resolution = - ps.node_resolver.resolve_binary_export(&package_ref)?; - let is_main_cjs = - matches!(node_resolution, node::NodeResolution::CommonJs(_)); + let node_resolution = ps + .node_resolver + .resolve_binary_export::(&package_ref)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); (node_resolution.into_url(), is_main_cjs) } else if ps.options.is_npm_main() { - let node_resolution = - ps.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = - matches!(node_resolution, node::NodeResolution::CommonJs(_)); + let node_resolution = ps + .node_resolver + .url_to_node_resolution::(main_module)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); (node_resolution.into_url(), is_main_cjs) } else { (main_module, false) @@ -344,7 +345,7 @@ pub 
async fn create_custom_worker( should_break_on_first_statement: ps.options.inspect_brk().is_some(), should_wait_for_inspector_session: ps.options.inspect_wait().is_some(), module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())), + npm_resolver: Some(Rc::new(ps.npm_resolver.clone())), get_error_class_fn: Some(&errors::get_error_class_name), cache_storage_dir, origin_storage_dir, @@ -467,7 +468,7 @@ fn create_web_worker_callback( format_js_error_fn: Some(Arc::new(format_js_error)), source_map_getter: Some(Box::new(module_loader.clone())), module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())), + npm_resolver: Some(Rc::new(ps.npm_resolver.clone())), worker_type: args.worker_type, maybe_inspector_server, get_error_class_fn: Some(&errors::get_error_class_name), diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 0d647e4f02..576e62d559 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -18,6 +18,9 @@ aes.workspace = true cbc.workspace = true data-encoding = "2.3.3" deno_core.workspace = true +deno_media_type.workspace = true +deno_npm.workspace = true +deno_semver.workspace = true digest = { version = "0.10.5", features = ["core-api", "std"] } dsa = "0.6.1" ecb.workspace = true diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index 03bf41995c..a206f4425a 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -17,9 +17,9 @@ use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; +use crate::NpmResolver; use crate::PackageJson; use crate::PathClean; -use crate::RequireNpmResolver; use crate::NODE_GLOBAL_THIS_NAME; static NODE_GLOBALS: &[&str] = &[ @@ -66,20 +66,18 @@ pub trait CjsEsmCodeAnalyzer { pub struct NodeCodeTranslator< TCjsEsmCodeAnalyzer: CjsEsmCodeAnalyzer, - TRequireNpmResolver: RequireNpmResolver, + TNpmResolver: NpmResolver, > { cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - npm_resolver: TRequireNpmResolver, + 
npm_resolver: TNpmResolver, } -impl< - TCjsEsmCodeAnalyzer: CjsEsmCodeAnalyzer, - TRequireNpmResolver: RequireNpmResolver, - > NodeCodeTranslator +impl + NodeCodeTranslator { pub fn new( cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - npm_resolver: TRequireNpmResolver, + npm_resolver: TNpmResolver, ) -> Self { Self { cjs_esm_code_analyzer, @@ -242,7 +240,7 @@ impl< // todo(dsherret): use not_found error on not found here let module_dir = self.npm_resolver.resolve_package_folder_from_package( package_specifier.as_str(), - &referrer_path, + referrer, mode, )?; diff --git a/ext/node/lib.rs b/ext/node/lib.rs index a521e161c9..38772d0fc7 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -5,12 +5,20 @@ use deno_core::located_script_name; use deno_core::op; use deno_core::serde_json; use deno_core::JsRuntime; +use deno_core::ModuleSpecifier; +use deno_npm::resolution::PackageReqNotFoundError; +use deno_npm::NpmPackageId; +use deno_semver::npm::NpmPackageNv; +use deno_semver::npm::NpmPackageNvReference; +use deno_semver::npm::NpmPackageReq; +use deno_semver::npm::NpmPackageReqReference; use once_cell::sync::Lazy; use std::collections::HashSet; use std::io; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; pub mod analyze; mod crypto; @@ -21,14 +29,15 @@ mod package_json; mod path; mod polyfill; mod resolution; +mod resolver; mod v8; mod winerror; mod zlib; pub use package_json::PackageJson; pub use path::PathClean; -pub use polyfill::find_builtin_node_module; pub use polyfill::is_builtin_node_module; +pub use polyfill::resolve_builtin_node_module; pub use polyfill::NodeModulePolyfill; pub use polyfill::SUPPORTED_BUILTIN_NODE_MODULES; pub use resolution::get_closest_package_json; @@ -41,6 +50,8 @@ pub use resolution::path_to_declaration_path; pub use resolution::NodeModuleKind; pub use resolution::NodeResolutionMode; pub use resolution::DEFAULT_CONDITIONS; +pub use resolver::NodeResolution; +pub use resolver::NodeResolver; pub trait 
NodeEnv { type P: NodePermissions; @@ -51,6 +62,14 @@ pub trait NodePermissions { fn check_read(&mut self, path: &Path) -> Result<(), AnyError>; } +pub(crate) struct AllowAllNodePermissions; + +impl NodePermissions for AllowAllNodePermissions { + fn check_read(&mut self, _path: &Path) -> Result<(), AnyError> { + Ok(()) + } +} + #[derive(Default, Clone)] pub struct NodeFsMetadata { pub is_file: bool, @@ -114,20 +133,47 @@ impl NodeFs for RealFs { } } -pub trait RequireNpmResolver { +pub trait NpmResolver { + /// Resolves an npm package folder path from an npm package referrer. fn resolve_package_folder_from_package( &self, specifier: &str, - referrer: &Path, + referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result; + /// Resolves the npm package folder path from the specified path. fn resolve_package_folder_from_path( &self, path: &Path, ) -> Result; - fn in_npm_package(&self, path: &Path) -> bool; + /// Resolves an npm package folder path from a Deno module. + fn resolve_package_folder_from_deno_module( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result; + + fn resolve_pkg_id_from_pkg_req( + &self, + req: &NpmPackageReq, + ) -> Result; + + fn resolve_nv_ref_from_pkg_req_ref( + &self, + req_ref: &NpmPackageReqReference, + ) -> Result; + + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool; + + fn in_npm_package_at_path(&self, path: &Path) -> bool { + let specifier = + match ModuleSpecifier::from_file_path(path.to_path_buf().clean()) { + Ok(p) => p, + Err(_) => return false, + }; + self.in_npm_package(&specifier) + } fn ensure_read_permission( &self, @@ -136,6 +182,57 @@ pub trait RequireNpmResolver { ) -> Result<(), AnyError>; } +impl NpmResolver for Arc { + fn resolve_package_folder_from_package( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + mode: NodeResolutionMode, + ) -> Result { + (**self).resolve_package_folder_from_package(specifier, referrer, mode) + } + + fn resolve_package_folder_from_path( + &self, + path: &Path, + ) -> 
Result { + (**self).resolve_package_folder_from_path(path) + } + + fn resolve_package_folder_from_deno_module( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result { + (**self).resolve_package_folder_from_deno_module(pkg_nv) + } + + fn resolve_pkg_id_from_pkg_req( + &self, + req: &NpmPackageReq, + ) -> Result { + (**self).resolve_pkg_id_from_pkg_req(req) + } + + fn resolve_nv_ref_from_pkg_req_ref( + &self, + req_ref: &NpmPackageReqReference, + ) -> Result { + (**self).resolve_nv_ref_from_pkg_req_ref(req_ref) + } + + fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + (**self).in_npm_package(specifier) + } + + fn ensure_read_permission( + &self, + permissions: &mut dyn NodePermissions, + path: &Path, + ) -> Result<(), AnyError> { + (**self).ensure_read_permission(permissions, path) + } +} + pub static NODE_GLOBAL_THIS_NAME: Lazy = Lazy::new(|| { let now = std::time::SystemTime::now(); let seconds = now @@ -490,7 +587,7 @@ deno_core::extension!(deno_node, "zlib.ts", ], options = { - maybe_npm_resolver: Option>, + maybe_npm_resolver: Option>, }, state = |state, options| { if let Some(npm_resolver) = options.maybe_npm_resolver { diff --git a/ext/node/ops.rs b/ext/node/ops.rs index 3db23b5eaf..662168acc2 100644 --- a/ext/node/ops.rs +++ b/ext/node/ops.rs @@ -7,6 +7,7 @@ use deno_core::normalize_path; use deno_core::op; use deno_core::url::Url; use deno_core::JsRuntimeInspector; +use deno_core::ModuleSpecifier; use deno_core::OpState; use std::cell::RefCell; use std::path::Path; @@ -20,8 +21,8 @@ use super::resolution; use super::NodeModuleKind; use super::NodePermissions; use super::NodeResolutionMode; +use super::NpmResolver; use super::PackageJson; -use super::RequireNpmResolver; fn ensure_read_permission

( state: &mut OpState, @@ -31,7 +32,7 @@ where P: NodePermissions + 'static, { let resolver = { - let resolver = state.borrow::>(); + let resolver = state.borrow::>(); resolver.clone() }; let permissions = state.borrow_mut::

(); @@ -191,11 +192,11 @@ fn op_require_resolve_deno_dir( request: String, parent_filename: String, ) -> Option { - let resolver = state.borrow::>(); + let resolver = state.borrow::>(); resolver .resolve_package_folder_from_package( &request, - &PathBuf::from(parent_filename), + &ModuleSpecifier::from_file_path(parent_filename).unwrap(), NodeResolutionMode::Execution, ) .ok() @@ -204,8 +205,8 @@ fn op_require_resolve_deno_dir( #[op] fn op_require_is_deno_dir_package(state: &mut OpState, path: String) -> bool { - let resolver = state.borrow::>(); - resolver.in_npm_package(&PathBuf::from(path)) + let resolver = state.borrow::>(); + resolver.in_npm_package_at_path(&PathBuf::from(path)) } #[op] @@ -375,7 +376,7 @@ where return Ok(None); } - let resolver = state.borrow::>().clone(); + let resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); let pkg = resolution::get_package_scope_config::( &Url::from_file_path(parent_path.unwrap()).unwrap(), @@ -462,10 +463,11 @@ fn op_require_resolve_exports( where Env: NodeEnv + 'static, { - let resolver = state.borrow::>().clone(); + let resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); - let pkg_path = if resolver.in_npm_package(&PathBuf::from(&modules_path)) + let pkg_path = if resolver + .in_npm_package_at_path(&PathBuf::from(&modules_path)) && !uses_local_node_modules_dir { modules_path @@ -515,7 +517,7 @@ where state, PathBuf::from(&filename).parent().unwrap(), )?; - let resolver = state.borrow::>().clone(); + let resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); resolution::get_closest_package_json::( &Url::from_file_path(filename).unwrap(), @@ -532,7 +534,7 @@ fn op_require_read_package_scope( where Env: NodeEnv + 'static, { - let resolver = state.borrow::>().clone(); + let resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); let package_json_path = PathBuf::from(package_json_path); PackageJson::load::(&*resolver, 
permissions, package_json_path).ok() @@ -549,7 +551,7 @@ where { let parent_path = PathBuf::from(&parent_filename); ensure_read_permission::(state, &parent_path)?; - let resolver = state.borrow::>().clone(); + let resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); let pkg = PackageJson::load::( &*resolver, diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs index 60f50ad787..08f78681ae 100644 --- a/ext/node/package_json.rs +++ b/ext/node/package_json.rs @@ -4,7 +4,7 @@ use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; -use super::RequireNpmResolver; +use super::NpmResolver; use deno_core::anyhow; use deno_core::anyhow::bail; @@ -63,7 +63,7 @@ impl PackageJson { } pub fn load( - resolver: &dyn RequireNpmResolver, + resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, path: PathBuf, ) -> Result { diff --git a/ext/node/polyfill.rs b/ext/node/polyfill.rs index 1fbb4afa3d..b334d2d341 100644 --- a/ext/node/polyfill.rs +++ b/ext/node/polyfill.rs @@ -1,8 +1,22 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-pub fn find_builtin_node_module( - module_name: &str, -) -> Option<&NodeModulePolyfill> { +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_core::url::Url; +use deno_core::ModuleSpecifier; + +// TODO(bartlomieju): seems super wasteful to parse the specifier each time +pub fn resolve_builtin_node_module(module_name: &str) -> Result { + if let Some(module) = find_builtin_node_module(module_name) { + return Ok(ModuleSpecifier::parse(module.specifier).unwrap()); + } + + Err(generic_error(format!( + "Unknown built-in \"node:\" module: {module_name}" + ))) +} + +fn find_builtin_node_module(module_name: &str) -> Option<&NodeModulePolyfill> { SUPPORTED_BUILTIN_NODE_MODULES .iter() .find(|m| m.name == module_name) diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 1422ba6b02..d324f4b4b9 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -16,7 +16,7 @@ use crate::package_json::PackageJson; use crate::path::PathClean; use crate::NodeFs; use crate::NodePermissions; -use crate::RequireNpmResolver; +use crate::NpmResolver; pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; @@ -190,7 +190,7 @@ pub fn package_imports_resolve( referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result { if name == "#" || name.starts_with("#/") || name.ends_with('/') { @@ -328,7 +328,7 @@ fn resolve_package_target_string( internal: bool, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result { if !subpath.is_empty() && !pattern && !target.ends_with('/') { @@ -438,7 +438,7 @@ fn resolve_package_target( internal: bool, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn 
RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result, AnyError> { if let Some(target) = target.as_str() { @@ -576,7 +576,7 @@ pub fn package_exports_resolve( referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result { if package_exports.contains_key(&package_subpath) @@ -733,7 +733,7 @@ pub fn package_resolve( referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result, AnyError> { let (package_name, package_subpath, _is_scoped) = @@ -763,7 +763,7 @@ pub fn package_resolve( let package_dir_path = npm_resolver.resolve_package_folder_from_package( &package_name, - &referrer.to_file_path().unwrap(), + referrer, mode, )?; let package_json_path = package_dir_path.join("package.json"); @@ -815,7 +815,7 @@ pub fn package_resolve( pub fn get_package_scope_config( referrer: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result { let root_folder = npm_resolver @@ -826,7 +826,7 @@ pub fn get_package_scope_config( pub fn get_closest_package_json( url: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result { let package_json_path = @@ -836,7 +836,7 @@ pub fn get_closest_package_json( fn get_closest_package_json_path( url: &ModuleSpecifier, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, ) -> Result { let file_path = url.to_file_path().unwrap(); let mut current_dir = file_path.parent().unwrap(); diff --git a/cli/node/mod.rs b/ext/node/resolver.rs similarity index 77% rename from cli/node/mod.rs rename to ext/node/resolver.rs index 
eb584879e8..41e1cf4d4d 100644 --- a/cli/node/mod.rs +++ b/ext/node/resolver.rs @@ -2,45 +2,34 @@ use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; -use deno_ast::MediaType; -use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::serde_json::Value; use deno_core::url::Url; -use deno_runtime::deno_node; -use deno_runtime::deno_node::errors; -use deno_runtime::deno_node::find_builtin_node_module; -use deno_runtime::deno_node::get_closest_package_json; -use deno_runtime::deno_node::legacy_main_resolve; -use deno_runtime::deno_node::package_exports_resolve; -use deno_runtime::deno_node::package_imports_resolve; -use deno_runtime::deno_node::package_resolve; -use deno_runtime::deno_node::path_to_declaration_path; -use deno_runtime::deno_node::NodeModuleKind; -use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeResolutionMode; -use deno_runtime::deno_node::PackageJson; -use deno_runtime::deno_node::RealFs; -use deno_runtime::deno_node::RequireNpmResolver; -use deno_runtime::deno_node::DEFAULT_CONDITIONS; -use deno_runtime::permissions::PermissionsContainer; +use deno_core::ModuleSpecifier; +use deno_media_type::MediaType; use deno_semver::npm::NpmPackageNv; use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageReqReference; -use crate::npm::CliRequireNpmResolver; -use crate::npm::NpmPackageResolver; -use crate::npm::NpmResolution; -use crate::util::fs::canonicalize_path_maybe_not_exists; - -mod analyze; - -pub use analyze::CliCjsEsmCodeAnalyzer; +use crate::errors; +use crate::get_closest_package_json; +use crate::legacy_main_resolve; +use crate::package_exports_resolve; +use crate::package_imports_resolve; +use crate::package_resolve; +use crate::path_to_declaration_path; +use crate::AllowAllNodePermissions; +use crate::NodeFs; +use crate::NodeModuleKind; +use crate::NodePermissions; 
+use crate::NodeResolutionMode; +use crate::NpmResolver; +use crate::PackageJson; +use crate::DEFAULT_CONDITIONS; #[derive(Debug)] pub enum NodeResolution { @@ -101,33 +90,15 @@ impl NodeResolution { } } -// TODO(bartlomieju): seems super wasteful to parse specified each time -pub fn resolve_builtin_node_module(module_name: &str) -> Result { - if let Some(module) = find_builtin_node_module(module_name) { - return Ok(ModuleSpecifier::parse(module.specifier).unwrap()); - } - - Err(generic_error(format!( - "Unknown built-in \"node:\" module: {module_name}" - ))) -} - #[derive(Debug)] -pub struct CliNodeResolver { - npm_resolution: Arc, - npm_resolver: Arc, - require_npm_resolver: CliRequireNpmResolver, +pub struct NodeResolver { + npm_resolver: TRequireNpmResolver, } -impl CliNodeResolver { - pub fn new( - npm_resolution: Arc, - npm_package_resolver: Arc, - ) -> Self { +impl NodeResolver { + pub fn new(require_npm_resolver: TRequireNpmResolver) -> Self { Self { - npm_resolution, - require_npm_resolver: npm_package_resolver.as_require_npm_resolver(), - npm_resolver: npm_package_resolver, + npm_resolver: require_npm_resolver, } } @@ -137,7 +108,7 @@ impl CliNodeResolver { /// This function is an implementation of `defaultResolve` in /// `lib/internal/modules/esm/resolve.js` from Node. 
- pub fn resolve( + pub fn resolve( &self, specifier: &str, referrer: &ModuleSpecifier, @@ -147,7 +118,7 @@ impl CliNodeResolver { // Note: if we are here, then the referrer is an esm module // TODO(bartlomieju): skipped "policy" part as we don't plan to support it - if deno_node::is_builtin_node_module(specifier) { + if crate::is_builtin_node_module(specifier) { return Ok(Some(NodeResolution::BuiltIn(specifier.to_string()))); } @@ -162,7 +133,7 @@ impl CliNodeResolver { let split_specifier = url.as_str().split(':'); let specifier = split_specifier.skip(1).collect::(); - if deno_node::is_builtin_node_module(&specifier) { + if crate::is_builtin_node_module(&specifier) { return Ok(Some(NodeResolution::BuiltIn(specifier))); } } @@ -178,7 +149,7 @@ impl CliNodeResolver { } } - let url = self.module_resolve( + let url = self.module_resolve::( specifier, referrer, DEFAULT_CONDITIONS, @@ -196,7 +167,7 @@ impl CliNodeResolver { // todo(16370): the module kind is not correct here. I think we need // typescript to tell us if the referrer is esm or cjs let path = - match path_to_declaration_path::(path, NodeModuleKind::Esm) { + match path_to_declaration_path::(path, NodeModuleKind::Esm) { Some(path) => path, None => return Ok(None), }; @@ -204,13 +175,13 @@ impl CliNodeResolver { } }; - let resolve_response = self.url_to_node_resolution(url)?; + let resolve_response = self.url_to_node_resolution::(url)?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. 
Ok(Some(resolve_response)) } - fn module_resolve( + fn module_resolve( &self, specifier: &str, referrer: &ModuleSpecifier, @@ -226,7 +197,7 @@ impl CliNodeResolver { // todo(dsherret): the node module kind is not correct and we // should use the value provided by typescript instead let declaration_path = - path_to_declaration_path::(file_path, NodeModuleKind::Esm); + path_to_declaration_path::(file_path, NodeModuleKind::Esm); declaration_path.map(|declaration_path| { ModuleSpecifier::from_file_path(declaration_path).unwrap() }) @@ -235,13 +206,13 @@ impl CliNodeResolver { } } else if specifier.starts_with('#') { Some( - package_imports_resolve::( + package_imports_resolve::( specifier, referrer, NodeModuleKind::Esm, conditions, mode, - &self.require_npm_resolver, + &self.npm_resolver, permissions, ) .map(|p| ModuleSpecifier::from_file_path(p).unwrap())?, @@ -249,34 +220,36 @@ impl CliNodeResolver { } else if let Ok(resolved) = Url::parse(specifier) { Some(resolved) } else { - package_resolve::( + package_resolve::( specifier, referrer, NodeModuleKind::Esm, conditions, mode, - &self.require_npm_resolver, + &self.npm_resolver, permissions, )? 
.map(|p| ModuleSpecifier::from_file_path(p).unwrap()) }; Ok(match url { - Some(url) => Some(finalize_resolution(url, referrer)?), + Some(url) => Some(finalize_resolution::(url, referrer)?), None => None, }) } - pub fn resolve_npm_req_reference( + pub fn resolve_npm_req_reference( &self, reference: &NpmPackageReqReference, mode: NodeResolutionMode, permissions: &mut dyn NodePermissions, ) -> Result, AnyError> { - let reference = self.npm_resolution.pkg_req_ref_to_nv_ref(reference)?; - self.resolve_npm_reference(&reference, mode, permissions) + let reference = self + .npm_resolver + .resolve_nv_ref_from_pkg_req_ref(reference)?; + self.resolve_npm_reference::(&reference, mode, permissions) } - pub fn resolve_npm_reference( + pub fn resolve_npm_reference( &self, reference: &NpmPackageNvReference, mode: NodeResolutionMode, @@ -286,7 +259,7 @@ impl CliNodeResolver { .npm_resolver .resolve_package_folder_from_deno_module(&reference.nv)?; let node_module_kind = NodeModuleKind::Esm; - let maybe_resolved_path = package_config_resolve( + let maybe_resolved_path = package_config_resolve::( &reference .sub_path .as_ref() @@ -296,7 +269,7 @@ impl CliNodeResolver { node_module_kind, DEFAULT_CONDITIONS, mode, - &self.require_npm_resolver, + &self.npm_resolver, permissions, ) .with_context(|| { @@ -309,23 +282,20 @@ impl CliNodeResolver { let resolved_path = match mode { NodeResolutionMode::Execution => resolved_path, NodeResolutionMode::Types => { - match path_to_declaration_path::( - resolved_path, - node_module_kind, - ) { + match path_to_declaration_path::(resolved_path, node_module_kind) { Some(path) => path, None => return Ok(None), } } }; let url = ModuleSpecifier::from_file_path(resolved_path).unwrap(); - let resolve_response = self.url_to_node_resolution(url)?; + let resolve_response = self.url_to_node_resolution::(url)?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. 
Ok(Some(resolve_response)) } - pub fn resolve_binary_commands( + pub fn resolve_binary_commands( &self, pkg_nv: &NpmPackageNv, ) -> Result, AnyError> { @@ -333,9 +303,9 @@ impl CliNodeResolver { .npm_resolver .resolve_package_folder_from_deno_module(pkg_nv)?; let package_json_path = package_folder.join("package.json"); - let package_json = PackageJson::load::( - &self.require_npm_resolver, - &mut PermissionsContainer::allow_all(), + let package_json = PackageJson::load::( + &self.npm_resolver, + &mut AllowAllNodePermissions, package_json_path, )?; @@ -348,12 +318,12 @@ impl CliNodeResolver { }) } - pub fn resolve_binary_export( + pub fn resolve_binary_export( &self, pkg_ref: &NpmPackageReqReference, ) -> Result { let pkg_nv = self - .npm_resolution + .npm_resolver .resolve_pkg_id_from_pkg_req(&pkg_ref.req)? .nv; let bin_name = pkg_ref.sub_path.as_deref(); @@ -361,9 +331,9 @@ impl CliNodeResolver { .npm_resolver .resolve_package_folder_from_deno_module(&pkg_nv)?; let package_json_path = package_folder.join("package.json"); - let package_json = PackageJson::load::( - &self.require_npm_resolver, - &mut PermissionsContainer::allow_all(), + let package_json = PackageJson::load::( + &self.npm_resolver, + &mut AllowAllNodePermissions, package_json_path, )?; let bin = match &package_json.bin { @@ -377,13 +347,13 @@ impl CliNodeResolver { let url = ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap(); - let resolve_response = self.url_to_node_resolution(url)?; + let resolve_response = self.url_to_node_resolution::(url)?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. 
Ok(resolve_response) } - pub fn url_to_node_resolution( + pub fn url_to_node_resolution( &self, url: ModuleSpecifier, ) -> Result { @@ -391,10 +361,10 @@ impl CliNodeResolver { if url_str.starts_with("http") { Ok(NodeResolution::Esm(url)) } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { - let package_config = get_closest_package_json::( + let package_config = get_closest_package_json::( &url, - &self.require_npm_resolver, - &mut PermissionsContainer::allow_all(), + &self.npm_resolver, + &mut AllowAllNodePermissions, )?; if package_config.typ == "module" { Ok(NodeResolution::Esm(url)) @@ -413,25 +383,6 @@ impl CliNodeResolver { } } -/// Resolves a specifier that is pointing into a node_modules folder. -/// -/// Note: This should be called whenever getting the specifier from -/// a Module::External(module) reference because that module might -/// not be fully resolved at the time deno_graph is analyzing it -/// because the node_modules folder might not exist at that time. 
-pub fn resolve_specifier_into_node_modules( - specifier: &ModuleSpecifier, -) -> ModuleSpecifier { - specifier - .to_file_path() - .ok() - // this path might not exist at the time the graph is being created - // because the node_modules folder might not yet exist - .and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) - .and_then(|path| ModuleSpecifier::from_file_path(path).ok()) - .unwrap_or_else(|| specifier.clone()) -} - fn resolve_bin_entry_value<'a>( pkg_nv: &NpmPackageNv, bin_name: Option<&str>, @@ -488,24 +439,24 @@ fn resolve_bin_entry_value<'a>( } } -fn package_config_resolve( +fn package_config_resolve( package_subpath: &str, package_dir: &Path, referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - npm_resolver: &dyn RequireNpmResolver, + npm_resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, ) -> Result, AnyError> { let package_json_path = package_dir.join("package.json"); let referrer = ModuleSpecifier::from_directory_path(package_dir).unwrap(); - let package_config = PackageJson::load::( + let package_config = PackageJson::load::( npm_resolver, permissions, package_json_path.clone(), )?; if let Some(exports) = &package_config.exports { - let result = package_exports_resolve::( + let result = package_exports_resolve::( &package_json_path, package_subpath.to_string(), exports, @@ -521,7 +472,7 @@ fn package_config_resolve( Err(exports_err) => { if mode.is_types() && package_subpath == "." { if let Ok(Some(path)) = - legacy_main_resolve::(&package_config, referrer_kind, mode) + legacy_main_resolve::(&package_config, referrer_kind, mode) { return Ok(Some(path)); } else { @@ -533,13 +484,13 @@ fn package_config_resolve( } } if package_subpath == "." 
{ - return legacy_main_resolve::(&package_config, referrer_kind, mode); + return legacy_main_resolve::(&package_config, referrer_kind, mode); } Ok(Some(package_dir.join(package_subpath))) } -fn finalize_resolution( +fn finalize_resolution( resolved: ModuleSpecifier, base: &ModuleSpecifier, ) -> Result { @@ -567,8 +518,8 @@ fn finalize_resolution( p_str.to_string() }; - let (is_dir, is_file) = if let Ok(stats) = std::fs::metadata(p) { - (stats.is_dir(), stats.is_file()) + let (is_dir, is_file) = if let Ok(stats) = Fs::metadata(p) { + (stats.is_dir, stats.is_file) } else { (false, false) }; diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 0d743cfc62..06540a9bbf 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -37,7 +37,7 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_node::RequireNpmResolver; +use deno_node::NpmResolver; use deno_tls::rustls::RootCertStore; use deno_web::create_entangled_message_port; use deno_web::BlobStore; @@ -333,7 +333,7 @@ pub struct WebWorkerOptions { pub root_cert_store: Option, pub seed: Option, pub module_loader: Rc, - pub npm_resolver: Option>, + pub npm_resolver: Option>, pub create_web_worker_cb: Arc, pub preload_module_cb: Arc, pub pre_execute_module_cb: Arc, diff --git a/runtime/worker.rs b/runtime/worker.rs index 14abd12b55..5cd60604d2 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -33,7 +33,7 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_node::RequireNpmResolver; +use deno_node::NpmResolver; use deno_tls::rustls::RootCertStore; use deno_web::BlobStore; use log::debug; @@ -94,7 +94,7 @@ pub struct WorkerOptions { /// If not provided runtime will error if code being /// executed tries to load modules. 
pub module_loader: Rc, - pub npm_resolver: Option>, + pub npm_resolver: Option>, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, pub web_worker_preload_module_cb: Arc, From 068228cb454d14a6f5943061a5a6569b9e395e23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 22 Apr 2023 11:17:31 +0200 Subject: [PATCH 014/320] refactor: rewrite tests to "fastwebsockets" crate (#18781) Migrating off of `tokio-tungstenite` crate. --------- Co-authored-by: Divy Srivastava --- Cargo.lock | 8 +- Cargo.toml | 1 + cli/Cargo.toml | 2 + cli/tests/integration/inspector_tests.rs | 90 ++++++++---- cli/tests/testdata/run/websocket_test.ts | 2 +- ext/websocket/Cargo.toml | 2 +- runtime/Cargo.toml | 1 + runtime/inspector_server.rs | 96 ++++++------- test_util/Cargo.toml | 3 +- test_util/src/lib.rs | 169 +++++++++++++++-------- 10 files changed, 229 insertions(+), 145 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ceeb2cf2a0..114a6e0e80 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -721,12 +721,14 @@ dependencies = [ "env_logger", "eszip", "fancy-regex", + "fastwebsockets", "flaky_test", "flate2", "fs3", "fwdansi", "glibc_version", "http", + "hyper", "import_map 0.15.0", "indexmap", "jsonc-parser", @@ -1235,6 +1237,7 @@ dependencies = [ "deno_webstorage", "dlopen", "encoding_rs", + "fastwebsockets", "filetime", "fs3", "fwdansi", @@ -1791,9 +1794,9 @@ dependencies = [ [[package]] name = "fastwebsockets" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf2f933f24f45831bd66580a8f9394e440f1f5a23806cf0d4d8b6649e1a01e9" +checksum = "a9e973e2bd2dbd77cc9e929ede2ce65984a35ac5481976afbfbd509cb40dc965" dependencies = [ "base64 0.21.0", "cc", @@ -4864,6 +4867,7 @@ dependencies = [ "atty", "base64 0.13.1", "console_static_text", + "fastwebsockets", "flate2", "futures", "hyper", diff --git a/Cargo.toml b/Cargo.toml index 6b49de2311..aa12e16295 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -91,6 +91,7 @@ data-url = "=0.2.0" dlopen = "0.1.8" encoding_rs = "=0.8.31" ecb = "=0.1.1" +fastwebsockets = "=0.2.5" flate2 = "=1.0.24" fs3 = "0.5.0" futures = "0.3.21" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 96fe458ae2..ebd8583304 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -72,9 +72,11 @@ encoding_rs.workspace = true env_logger = "=0.9.0" eszip = "=0.41.0" fancy-regex = "=0.10.0" +fastwebsockets.workspace = true flate2.workspace = true fs3.workspace = true http.workspace = true +hyper.workspace = true import_map = "=0.15.0" indexmap.workspace = true jsonc-parser = { version = "=0.21.0", features = ["serde"] } diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index 067963786d..35ff014030 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -1,15 +1,16 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use deno_core::anyhow::anyhow; use deno_core::error::AnyError; -use deno_core::futures::prelude::*; -use deno_core::futures::stream::SplitSink; -use deno_core::futures::stream::SplitStream; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url; use deno_runtime::deno_fetch::reqwest; use deno_runtime::deno_websocket::tokio_tungstenite; -use deno_runtime::deno_websocket::tokio_tungstenite::tungstenite; +use fastwebsockets::FragmentCollector; +use fastwebsockets::Frame; +use hyper::upgrade::Upgraded; +use hyper::Request; use std::io::BufRead; use test_util as util; use test_util::TempDir; @@ -17,18 +18,20 @@ use tokio::net::TcpStream; use util::http_server; use util::DenoChild; +struct SpawnExecutor; + +impl hyper::rt::Executor for SpawnExecutor +where + Fut: std::future::Future + Send + 'static, + Fut::Output: Send + 'static, +{ + fn execute(&self, fut: Fut) { + tokio::task::spawn(fut); + } +} + struct InspectorTester { - socket_tx: SplitSink< - 
tokio_tungstenite::WebSocketStream< - tokio_tungstenite::MaybeTlsStream, - >, - tungstenite::Message, - >, - socket_rx: SplitStream< - tokio_tungstenite::WebSocketStream< - tokio_tungstenite::MaybeTlsStream, - >, - >, + socket: FragmentCollector, notification_filter: Box bool + 'static>, child: DenoChild, stderr_lines: Box>, @@ -52,17 +55,42 @@ impl InspectorTester { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - let ws_url = extract_ws_url_from_stderr(&mut stderr_lines); + let uri = extract_ws_url_from_stderr(&mut stderr_lines); + + let domain = &uri.host().unwrap().to_string(); + let port = &uri.port().unwrap_or(match uri.scheme() { + "wss" | "https" => 443, + _ => 80, + }); + let addr = format!("{domain}:{port}"); + + let stream = TcpStream::connect(addr).await.unwrap(); + + let host = uri.host_str().unwrap(); + + let req = Request::builder() + .method("GET") + .uri(uri.path()) + .header("Host", host) + .header(hyper::header::UPGRADE, "websocket") + .header(hyper::header::CONNECTION, "Upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13") + .body(hyper::Body::empty()) + .unwrap(); let (socket, response) = - tokio_tungstenite::connect_async(ws_url).await.unwrap(); + fastwebsockets::handshake::client(&SpawnExecutor, req, stream) + .await + .unwrap(); + assert_eq!(response.status(), 101); // Switching protocols. 
- let (socket_tx, socket_rx) = socket.split(); - Self { - socket_tx, - socket_rx, + socket: FragmentCollector::new(socket), notification_filter: Box::new(notification_filter), child, stderr_lines: Box::new(stderr_lines), @@ -74,10 +102,10 @@ impl InspectorTester { // TODO(bartlomieju): add graceful error handling for msg in messages { let result = self - .socket_tx - .send(msg.to_string().into()) + .socket + .write_frame(Frame::text(msg.to_string().into_bytes())) .await - .map_err(|e| e.into()); + .map_err(|e| anyhow!(e)); self.handle_error(result); } } @@ -111,8 +139,9 @@ impl InspectorTester { async fn recv(&mut self) -> String { loop { - let result = self.socket_rx.next().await.unwrap().map_err(|e| e.into()); - let message = self.handle_error(result).to_string(); + let result = self.socket.read_frame().await.map_err(|e| anyhow!(e)); + let message = + String::from_utf8(self.handle_error(result).payload).unwrap(); if (self.notification_filter)(&message) { return message; } @@ -236,7 +265,7 @@ fn skip_check_line( let mut line = stderr_lines.next().unwrap(); line = util::strip_ansi_codes(&line).to_string(); - if line.starts_with("Check") { + if line.starts_with("Check") || line.starts_with("Download") { continue; } @@ -514,8 +543,11 @@ async fn inspector_does_not_hang() { } // Check that we can gracefully close the websocket connection. 
- tester.socket_tx.close().await.unwrap(); - tester.socket_rx.for_each(|_| async {}).await; + tester + .socket + .write_frame(Frame::close_raw(vec![])) + .await + .unwrap(); assert_eq!(&tester.stdout_lines.next().unwrap(), "done"); assert!(tester.child.wait().unwrap().success()); diff --git a/cli/tests/testdata/run/websocket_test.ts b/cli/tests/testdata/run/websocket_test.ts index a9dc34ad1d..27bc5adf92 100644 --- a/cli/tests/testdata/run/websocket_test.ts +++ b/cli/tests/testdata/run/websocket_test.ts @@ -161,7 +161,7 @@ Deno.test("websocket error", async () => { assert(err instanceof ErrorEvent); // Error message got changed because we don't use warp in test_util - assertEquals(err.message, "UnexpectedEof: tls handshake eof"); + assertEquals(err.message, "InvalidData: received corrupt message"); promise1.resolve(); }; await promise1; diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index a96b6cceb9..53e184e1e2 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -16,7 +16,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_tls.workspace = true -fastwebsockets = { version = "0.2.4", features = ["upgrade"] } +fastwebsockets = { workspace = true, features = ["upgrade"] } http.workspace = true hyper.workspace = true serde.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 58f292e8f9..20cbda0bfb 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -80,6 +80,7 @@ deno_web.workspace = true deno_webidl.workspace = true deno_websocket.workspace = true deno_webstorage.workspace = true +fastwebsockets.workspace = true atty.workspace = true console_static_text.workspace = true diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs index d65e813cb6..25d0d796c1 100644 --- a/runtime/inspector_server.rs +++ b/runtime/inspector_server.rs @@ -1,7 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-use core::convert::Infallible as Never; // Alias for the future `!` type. -use deno_core::error::AnyError; +// Alias for the future `!` type. +use core::convert::Infallible as Never; use deno_core::futures::channel::mpsc; use deno_core::futures::channel::mpsc::UnboundedReceiver; use deno_core::futures::channel::mpsc::UnboundedSender; @@ -18,8 +18,9 @@ use deno_core::serde_json::Value; use deno_core::InspectorMsg; use deno_core::InspectorSessionProxy; use deno_core::JsRuntime; -use deno_websocket::tokio_tungstenite::tungstenite; -use deno_websocket::tokio_tungstenite::WebSocketStream; +use fastwebsockets::Frame; +use fastwebsockets::OpCode; +use fastwebsockets::WebSocket; use std::cell::RefCell; use std::collections::HashMap; use std::convert::Infallible; @@ -145,35 +146,27 @@ fn handle_ws_request( let info = maybe_inspector_info.unwrap(); info.new_session_tx.clone() }; - - let resp = tungstenite::handshake::server::create_response(&req) - .map(|resp| resp.map(|_| hyper::Body::empty())) - .or_else(|e| match e { - tungstenite::error::Error::HttpFormat(http_error) => Err(http_error), - _ => http::Response::builder() - .status(http::StatusCode::BAD_REQUEST) - .body("Not a valid Websocket Request".into()), - })?; - let (parts, _) = req.into_parts(); - let req = http::Request::from_parts(parts, body); + let mut req = http::Request::from_parts(parts, body); + + let (resp, fut) = match fastwebsockets::upgrade::upgrade(&mut req) { + Ok(e) => e, + _ => { + return http::Response::builder() + .status(http::StatusCode::BAD_REQUEST) + .body("Not a valid Websocket Request".into()); + } + }; // spawn a task that will wait for websocket connection and then pump messages between // the socket and inspector proxy tokio::task::spawn_local(async move { - let upgrade_result = hyper::upgrade::on(req).await; - let upgraded = if let Ok(u) = upgrade_result { - u + let websocket = if let Ok(w) = fut.await { + w } else { eprintln!("Inspector server failed to upgrade to WS connection"); 
return; }; - let websocket = WebSocketStream::from_raw_socket( - upgraded, - tungstenite::protocol::Role::Server, - None, - ) - .await; // The 'outbound' channel carries messages sent to the websocket. let (outbound_tx, outbound_rx) = mpsc::unbounded(); @@ -324,37 +317,36 @@ async fn server( /// 'futures' crate, therefore they can't participate in Tokio's cooperative /// task yielding. async fn pump_websocket_messages( - websocket: WebSocketStream, + mut websocket: WebSocket, inbound_tx: UnboundedSender, - outbound_rx: UnboundedReceiver, + mut outbound_rx: UnboundedReceiver, ) { - let (websocket_tx, websocket_rx) = websocket.split(); - - let outbound_pump = outbound_rx - .map(|msg| tungstenite::Message::text(msg.content)) - .map(Ok) - .forward(websocket_tx) - .map_err(|_| ()); - - let inbound_pump = async move { - let _result = websocket_rx - .map_err(AnyError::from) - .map_ok(|msg| { - // Messages that cannot be converted to strings are ignored. - if let Ok(msg_text) = msg.into_text() { - let _ = inbound_tx.unbounded_send(msg_text); + 'pump: loop { + tokio::select! { + Some(msg) = outbound_rx.next() => { + let msg = Frame::text(msg.content.into_bytes()); + let _ = websocket.write_frame(msg).await; } - }) - .try_collect::<()>() - .await; - - // Users don't care if there was an error coming from debugger, - // just about the fact that debugger did disconnect. - eprintln!("Debugger session ended"); - - Ok(()) - }; - let _ = future::try_join(outbound_pump, inbound_pump).await; + Ok(msg) = websocket.read_frame() => { + match msg.opcode { + OpCode::Text => { + if let Ok(s) = String::from_utf8(msg.payload) { + let _ = inbound_tx.unbounded_send(s); + } + } + OpCode::Close => { + // Users don't care if there was an error coming from debugger, + // just about the fact that debugger did disconnect. + eprintln!("Debugger session ended"); + break 'pump; + } + _ => { + // Ignore other messages. 
+ } + } + } + } + } } /// Inspector information that is sent from the isolate thread to the server diff --git a/test_util/Cargo.toml b/test_util/Cargo.toml index cb1ea46cc2..5934913112 100644 --- a/test_util/Cargo.toml +++ b/test_util/Cargo.toml @@ -19,6 +19,7 @@ async-stream = "0.3.3" atty.workspace = true base64.workspace = true console_static_text.workspace = true +fastwebsockets = { workspace = true, features = ["upgrade"] } flate2.workspace = true futures.workspace = true hyper = { workspace = true, features = ["server", "http1", "http2", "runtime"] } @@ -40,7 +41,7 @@ tar.workspace = true tempfile.workspace = true tokio.workspace = true tokio-rustls.workspace = true -tokio-tungstenite.workspace = true +tokio-tungstenite = { workspace = true, features = ["rustls-tls-webpki-roots"] } url.workspace = true [target.'cfg(unix)'.dependencies] diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs index 6a6614ad0a..e647c0a4cb 100644 --- a/test_util/src/lib.rs +++ b/test_util/src/lib.rs @@ -2,6 +2,7 @@ // Usage: provide a port as argument to run hyper_hello benchmark server // otherwise this starts multiple servers on many ports for test endpoints. 
use anyhow::anyhow; +use futures::Future; use futures::FutureExt; use futures::Stream; use futures::StreamExt; @@ -9,6 +10,7 @@ use hyper::header::HeaderValue; use hyper::server::Server; use hyper::service::make_service_fn; use hyper::service::service_fn; +use hyper::upgrade::Upgraded; use hyper::Body; use hyper::Request; use hyper::Response; @@ -49,7 +51,6 @@ use tokio::net::TcpListener; use tokio::net::TcpStream; use tokio_rustls::rustls; use tokio_rustls::TlsAcceptor; -use tokio_tungstenite::accept_async; use url::Url; pub mod assertions; @@ -302,69 +303,128 @@ async fn basic_auth_redirect( Ok(resp) } +async fn echo_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + loop { + let frame = ws.read_frame().await.unwrap(); + match frame.opcode { + fastwebsockets::OpCode::Close => break, + fastwebsockets::OpCode::Text | fastwebsockets::OpCode::Binary => { + ws.write_frame(frame).await.unwrap(); + } + _ => {} + } + } + + Ok(()) +} + +type WsHandler = + fn( + fastwebsockets::WebSocket, + ) -> Pin> + Send>>; + +fn spawn_ws_server(stream: S, handler: WsHandler) +where + S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, +{ + let srv_fn = service_fn(move |mut req: Request| async move { + let (response, upgrade_fut) = fastwebsockets::upgrade::upgrade(&mut req) + .map_err(|e| anyhow!("Error upgrading websocket connection: {}", e))?; + + tokio::spawn(async move { + let ws = upgrade_fut + .await + .map_err(|e| anyhow!("Error upgrading websocket connection: {}", e)) + .unwrap(); + + if let Err(e) = handler(ws).await { + eprintln!("Error in websocket connection: {}", e); + } + }); + + Ok::<_, anyhow::Error>(response) + }); + + tokio::spawn(async move { + let conn_fut = hyper::server::conn::Http::new() + .serve_connection(stream, srv_fn) + .with_upgrades(); + + if let Err(e) = conn_fut.await { + eprintln!("websocket server error: {e:?}"); + } + }); +} + async 
fn run_ws_server(addr: &SocketAddr) { let listener = TcpListener::bind(addr).await.unwrap(); println!("ready: ws"); // Eye catcher for HttpServerCount while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream_fut = accept_async(stream); - - let ws_stream = ws_stream_fut.await; - if let Ok(ws_stream) = ws_stream { - let (tx, rx) = ws_stream.split(); - rx.forward(tx) - .map(|result| { - if let Err(e) = result { - println!("websocket server error: {e:?}"); - } - }) - .await; - } - }); + spawn_ws_server(stream, |ws| Box::pin(echo_websocket_handler(ws))); } } +async fn ping_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + use fastwebsockets::Frame; + use fastwebsockets::OpCode; + + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + for i in 0..9 { + ws.write_frame(Frame::new(true, OpCode::Ping, None, vec![])) + .await + .unwrap(); + + let frame = ws.read_frame().await.unwrap(); + assert_eq!(frame.opcode, OpCode::Pong); + assert!(frame.payload.is_empty()); + + ws.write_frame(Frame::text(format!("hello {}", i).as_bytes().to_vec())) + .await + .unwrap(); + + let frame = ws.read_frame().await.unwrap(); + assert_eq!(frame.opcode, OpCode::Text); + assert_eq!(frame.payload, format!("hello {}", i).as_bytes()); + } + + ws.write_frame(fastwebsockets::Frame::close(1000, b"")) + .await + .unwrap(); + + Ok(()) +} + async fn run_ws_ping_server(addr: &SocketAddr) { let listener = TcpListener::bind(addr).await.unwrap(); println!("ready: ws"); // Eye catcher for HttpServerCount while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream = accept_async(stream).await; - use futures::SinkExt; - use tokio_tungstenite::tungstenite::Message; - if let Ok(mut ws_stream) = ws_stream { - for i in 0..9 { - ws_stream.send(Message::Ping(vec![])).await.unwrap(); - - let msg = ws_stream.next().await.unwrap().unwrap(); - assert_eq!(msg, Message::Pong(vec![])); - - 
ws_stream - .send(Message::Text(format!("hello {}", i))) - .await - .unwrap(); - - let msg = ws_stream.next().await.unwrap().unwrap(); - assert_eq!(msg, Message::Text(format!("hello {}", i))); - } - - ws_stream.close(None).await.unwrap(); - } - }); + spawn_ws_server(stream, |ws| Box::pin(ping_websocket_handler(ws))); } } +async fn close_websocket_handler( + ws: fastwebsockets::WebSocket, +) -> Result<(), anyhow::Error> { + let mut ws = fastwebsockets::FragmentCollector::new(ws); + + ws.write_frame(fastwebsockets::Frame::close_raw(vec![])) + .await + .unwrap(); + + Ok(()) +} + async fn run_ws_close_server(addr: &SocketAddr) { let listener = TcpListener::bind(addr).await.unwrap(); while let Ok((stream, _addr)) = listener.accept().await { - tokio::spawn(async move { - let ws_stream_fut = accept_async(stream); - - let ws_stream = ws_stream_fut.await; - if let Ok(mut ws_stream) = ws_stream { - ws_stream.close(None).await.unwrap(); - } - }); + spawn_ws_server(stream, |ws| Box::pin(close_websocket_handler(ws))); } } @@ -471,18 +531,9 @@ async fn run_wss_server(addr: &SocketAddr) { tokio::spawn(async move { match acceptor.accept(stream).await { Ok(tls_stream) => { - let ws_stream_fut = accept_async(tls_stream); - let ws_stream = ws_stream_fut.await; - if let Ok(ws_stream) = ws_stream { - let (tx, rx) = ws_stream.split(); - rx.forward(tx) - .map(|result| { - if let Err(e) = result { - println!("Websocket server error: {e:?}"); - } - }) - .await; - } + spawn_ws_server(tls_stream, |ws| { + Box::pin(echo_websocket_handler(ws)) + }); } Err(e) => { eprintln!("TLS accept error: {e:?}"); From d137501a639cb315772866f6775fcd9f43e28f5b Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Sat, 22 Apr 2023 13:20:00 +0200 Subject: [PATCH 015/320] feat(node/http): implement ClientRequest.setTimeout() (#18783) - implement setTimeout with matching semantics of Node - add the test from Node but leave it turned off because ClientRequest has no underlying socket --- 
cli/tests/node_compat/config.jsonc | 3 +++ ext/node/polyfills/http.ts | 25 +++++++++++++++++++++++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index ddbdf458ff..ce1cf3a08c 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -351,6 +351,9 @@ "test-http-agent-getname.js", "test-http-client-get-url.js", "test-http-client-read-in-error.js", + // TODO(lev): ClientRequest.socket is not polyfilled so this test keeps + // failing + //"test-http-client-set-timeout.js", "test-http-localaddress.js", "test-http-outgoing-buffer.js", "test-http-outgoing-internal-headernames-getter.js", diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 9104183cac..d8ec7650bc 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -18,6 +18,7 @@ import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts"; import * as denoHttp from "ext:deno_http/01_http.js"; import * as httpRuntime from "ext:runtime/40_http.js"; +import { connResetException } from "ext:deno_node/internal/errors.ts"; enum STATUS_CODES { /** RFC 7231, 6.2.1 */ @@ -259,16 +260,21 @@ class ClientRequest extends NodeWritable { method: this.opts.method, client, headers: this.opts.headers, + signal: this.opts.signal ?? 
undefined, }; const mayResponse = fetch(this._createUrlStrFromOptions(this.opts), opts) .catch((e) => { if (e.message.includes("connection closed before message completed")) { // Node.js seems ignoring this error + } else if (e.message.includes("The signal has been aborted")) { + // Remap this error + this.emit("error", connResetException("socket hang up")); } else { this.emit("error", e); } return undefined; }); + const res = new IncomingMessageForClient( await mayResponse, this._createSocket(), @@ -279,6 +285,10 @@ class ClientRequest extends NodeWritable { client.close(); }); } + if (this.opts.timeout != undefined) { + clearTimeout(this.opts.timeout); + this.opts.timeout = undefined; + } this.cb?.(res); } @@ -340,8 +350,19 @@ class ClientRequest extends NodeWritable { }${path}`; } - setTimeout() { - console.log("not implemented: ClientRequest.setTimeout"); + setTimeout(timeout: number, callback?: () => void) { + const controller = new AbortController(); + this.opts.signal = controller.signal; + + this.opts.timeout = setTimeout(() => { + controller.abort(); + + this.emit("timeout"); + + if (callback !== undefined) { + callback(); + } + }, timeout); } } From bdffcb409fd1e257db280ab73e07cc319711256c Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sat, 22 Apr 2023 11:48:21 -0600 Subject: [PATCH 016/320] feat(ext/http): Rework Deno.serve using hyper 1.0-rc3 (#18619) This is a rewrite of the `Deno.serve` API to live on top of hyper 1.0-rc3. The code should be more maintainable long-term, and avoids some of the slower mpsc patterns that made the older code less efficient than it could have been. Missing features: - `upgradeHttp` and `upgradeHttpRaw` (`upgradeWebSocket` is available, however). - Automatic compression is unavailable on responses. 
--- Cargo.lock | 64 +- Cargo.toml | 3 +- cli/bench/http/deno_http_serve_https.js | 18 + cli/tests/unit/serve_test.ts | 347 +++++++++-- cli/tests/unit/websocket_test.ts | 16 + core/io.rs | 33 +- ext/http/00_serve.js | 534 +++++++++++++++++ ext/http/01_http.js | 257 +------- ext/http/Cargo.toml | 8 + ext/http/http_next.rs | 765 ++++++++++++++++++++++++ ext/http/lib.rs | 84 ++- ext/http/request_body.rs | 84 +++ ext/http/request_properties.rs | 249 ++++++++ ext/http/response_body.rs | 253 ++++++++ ext/net/Cargo.toml | 1 + ext/net/lib.rs | 1 + ext/net/ops_tls.rs | 29 +- ext/net/ops_unix.rs | 4 +- ext/net/raw.rs | 304 ++++++++++ ext/websocket/Cargo.toml | 4 +- ext/websocket/lib.rs | 75 ++- ext/websocket/stream.rs | 115 ++++ 22 files changed, 2912 insertions(+), 336 deletions(-) create mode 100644 cli/bench/http/deno_http_serve_https.js create mode 100644 ext/http/00_serve.js create mode 100644 ext/http/http_next.rs create mode 100644 ext/http/request_body.rs create mode 100644 ext/http/request_properties.rs create mode 100644 ext/http/response_body.rs create mode 100644 ext/net/raw.rs create mode 100644 ext/websocket/stream.rs diff --git a/Cargo.lock b/Cargo.lock index 114a6e0e80..ac188de531 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -728,7 +728,7 @@ dependencies = [ "fwdansi", "glibc_version", "http", - "hyper", + "hyper 0.14.26", "import_map 0.15.0", "indexmap", "jsonc-parser", @@ -1022,11 +1022,14 @@ dependencies = [ "bytes", "cache_control", "deno_core", + "deno_net", "deno_websocket", "flate2", "fly-accept-encoding", + "http", "httparse", - "hyper", + "hyper 0.14.26", + "hyper 1.0.0-rc.3", "memmem", "mime", "once_cell", @@ -1035,6 +1038,8 @@ dependencies = [ "pin-project", "ring", "serde", + "slab", + "thiserror", "tokio", "tokio-util", ] @@ -1119,6 +1124,7 @@ dependencies = [ "deno_core", "deno_tls", "log", + "pin-project", "serde", "socket2", "tokio", @@ -1242,7 +1248,7 @@ dependencies = [ "fs3", "fwdansi", "http", - "hyper", + "hyper 0.14.26", "libc", "log", 
"netif", @@ -1345,11 +1351,13 @@ dependencies = [ name = "deno_websocket" version = "0.104.0" dependencies = [ + "bytes", "deno_core", + "deno_net", "deno_tls", "fastwebsockets", "http", - "hyper", + "hyper 0.14.26", "serde", "tokio", "tokio-rustls", @@ -1794,13 +1802,13 @@ dependencies = [ [[package]] name = "fastwebsockets" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e973e2bd2dbd77cc9e929ede2ce65984a35ac5481976afbfbd509cb40dc965" +checksum = "2fbc4aeb6c0ab927a93b5e5fc70d4c7f834260fc414021ac40c58d046ea0e394" dependencies = [ "base64 0.21.0", "cc", - "hyper", + "hyper 0.14.26", "pin-project", "rand", "sha1", @@ -2237,6 +2245,16 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-body" +version = "1.0.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "951dfc2e32ac02d67c90c0d65bd27009a635dc9b381a2cc7d284ab01e3a0150d" +dependencies = [ + "bytes", + "http", +] + [[package]] name = "httparse" version = "1.8.0" @@ -2267,7 +2285,7 @@ dependencies = [ "futures-util", "h2", "http", - "http-body", + "http-body 0.4.5", "httparse", "httpdate", "itoa", @@ -2279,6 +2297,28 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.0.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75264b2003a3913f118d35c586e535293b3e22e41f074930762929d071e092" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body 1.0.0-rc.2", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "tokio", + "tracing", + "want", +] + [[package]] name = "hyper-rustls" version = "0.23.2" @@ -2286,7 +2326,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" dependencies = [ "http", - "hyper", + "hyper 0.14.26", "rustls", "tokio", "tokio-rustls", @@ -3614,8 +3654,8 @@ 
dependencies = [ "futures-util", "h2", "http", - "http-body", - "hyper", + "http-body 0.4.5", + "hyper 0.14.26", "hyper-rustls", "ipnet", "js-sys", @@ -4870,7 +4910,7 @@ dependencies = [ "fastwebsockets", "flate2", "futures", - "hyper", + "hyper 0.14.26", "lazy-regex", "lsp-types", "nix", diff --git a/Cargo.toml b/Cargo.toml index aa12e16295..9edd7f8357 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -91,7 +91,7 @@ data-url = "=0.2.0" dlopen = "0.1.8" encoding_rs = "=0.8.31" ecb = "=0.1.1" -fastwebsockets = "=0.2.5" +fastwebsockets = "=0.2.6" flate2 = "=1.0.24" fs3 = "0.5.0" futures = "0.3.21" @@ -126,6 +126,7 @@ serde_json = "1.0.85" serde_repr = "=0.1.9" sha2 = { version = "0.10.6", features = ["oid"] } signature = "=1.6.4" +slab = "0.4" smallvec = "1.8" socket2 = "0.4.7" tar = "=0.4.38" diff --git a/cli/bench/http/deno_http_serve_https.js b/cli/bench/http/deno_http_serve_https.js new file mode 100644 index 0000000000..cea659e093 --- /dev/null +++ b/cli/bench/http/deno_http_serve_https.js @@ -0,0 +1,18 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +const addr = Deno.args[0] ?? 
"127.0.0.1:4500"; +const [hostname, port] = addr.split(":"); +const { serve } = Deno; + +function readFileSync(file) { + return Deno.readTextFileSync(new URL(file, import.meta.url).pathname); +} + +const CERT = readFileSync("../../tests/testdata/tls/localhost.crt"); +const KEY = readFileSync("../../tests/testdata/tls/localhost.key"); + +function handler() { + return new Response("Hello World"); +} + +serve(handler, { hostname, port, reusePort: true, cert: CERT, key: KEY }); diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 32d436d04f..8344f1be5e 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -2,6 +2,7 @@ // deno-lint-ignore-file +import { assertMatch } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; import { Buffer, BufReader, BufWriter } from "../../../test_util/std/io/mod.ts"; import { TextProtoReader } from "../testdata/run/textproto.ts"; import { @@ -31,6 +32,27 @@ function onListen( }; } +Deno.test(async function httpServerShutsDownPortBeforeResolving() { + const ac = new AbortController(); + const listeningPromise = deferred(); + + const server = Deno.serve({ + handler: (_req) => new Response("ok"), + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + }); + + await listeningPromise; + assertThrows(() => Deno.listen({ port: 4501 })); + + ac.abort(); + await server; + + const listener = Deno.listen({ port: 4501 }); + listener!.close(); +}); + Deno.test(async function httpServerCanResolveHostnames() { const ac = new AbortController(); const listeningPromise = deferred(); @@ -120,6 +142,71 @@ Deno.test({ permissions: { net: true } }, async function httpServerBasic() { await server; }); +Deno.test({ permissions: { net: true } }, async function httpServerOnError() { + const ac = new AbortController(); + const promise = deferred(); + const listeningPromise = deferred(); + let requestStash: Request | null; + + const server = Deno.serve({ + handler: async (request: Request) 
=> { + requestStash = request; + await new Promise((r) => setTimeout(r, 100)); + throw "fail"; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: () => { + return new Response("failed: " + requestStash!.url, { status: 500 }); + }, + }); + + await listeningPromise; + const resp = await fetch("http://127.0.0.1:4501/", { + headers: { "connection": "close" }, + }); + const text = await resp.text(); + ac.abort(); + await server; + + assertEquals(text, "failed: http://127.0.0.1:4501/"); +}); + +Deno.test( + { permissions: { net: true } }, + async function httpServerOnErrorFails() { + const ac = new AbortController(); + const promise = deferred(); + const listeningPromise = deferred(); + let requestStash: Request | null; + + const server = Deno.serve({ + handler: async (request: Request) => { + requestStash = request; + await new Promise((r) => setTimeout(r, 100)); + throw "fail"; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: () => { + throw "again"; + }, + }); + + await listeningPromise; + const resp = await fetch("http://127.0.0.1:4501/", { + headers: { "connection": "close" }, + }); + const text = await resp.text(); + ac.abort(); + await server; + + assertEquals(text, "Internal Server Error"); + }, +); + Deno.test({ permissions: { net: true } }, async function httpServerOverload1() { const ac = new AbortController(); const promise = deferred(); @@ -238,7 +325,7 @@ Deno.test( console.log = (msg) => { try { const match = msg.match(/Listening on http:\/\/localhost:(\d+)\//); - assert(!!match); + assert(!!match, `Didn't match ${msg}`); const port = +match[1]; assert(port > 0 && port < 65536); } finally { @@ -301,6 +388,109 @@ Deno.test( }, ); +function createUrlTest( + name: string, + methodAndPath: string, + host: string | null, + expected: string, +) { + Deno.test(`httpServerUrl${name}`, async () => { + const listeningPromise: Deferred = deferred(); + const urlPromise = deferred(); + 
const ac = new AbortController(); + const server = Deno.serve({ + handler: async (request: Request) => { + urlPromise.resolve(request.url); + return new Response(""); + }, + port: 0, + signal: ac.signal, + onListen: ({ port }: { port: number }) => { + listeningPromise.resolve(port); + }, + onError: createOnErrorCb(ac), + }); + + const port = await listeningPromise; + const conn = await Deno.connect({ port }); + + const encoder = new TextEncoder(); + const body = `${methodAndPath} HTTP/1.1\r\n${ + host ? ("Host: " + host + "\r\n") : "" + }Content-Length: 5\r\n\r\n12345`; + const writeResult = await conn.write(encoder.encode(body)); + assertEquals(body.length, writeResult); + + try { + const expectedResult = expected.replace("HOST", "localhost").replace( + "PORT", + `${port}`, + ); + assertEquals(await urlPromise, expectedResult); + } finally { + ac.abort(); + await server; + conn.close(); + } + }); +} + +createUrlTest("WithPath", "GET /path", null, "http://HOST:PORT/path"); +createUrlTest( + "WithPathAndHost", + "GET /path", + "deno.land", + "http://deno.land/path", +); +createUrlTest( + "WithAbsolutePath", + "GET http://localhost/path", + null, + "http://localhost/path", +); +createUrlTest( + "WithAbsolutePathAndHost", + "GET http://localhost/path", + "deno.land", + "http://localhost/path", +); +createUrlTest( + "WithPortAbsolutePath", + "GET http://localhost:1234/path", + null, + "http://localhost:1234/path", +); +createUrlTest( + "WithPortAbsolutePathAndHost", + "GET http://localhost:1234/path", + "deno.land", + "http://localhost:1234/path", +); +createUrlTest( + "WithPortAbsolutePathAndHostWithPort", + "GET http://localhost:1234/path", + "deno.land:9999", + "http://localhost:1234/path", +); + +createUrlTest("WithAsterisk", "OPTIONS *", null, "*"); +createUrlTest( + "WithAuthorityForm", + "CONNECT deno.land:80", + null, + "deno.land:80", +); + +// TODO(mmastrac): These should probably be 400 errors +createUrlTest("WithInvalidAsterisk", "GET *", null, "*"); 
+createUrlTest("WithInvalidNakedPath", "GET path", null, "path"); +createUrlTest( + "WithInvalidNakedAuthority", + "GET deno.land:1234", + null, + "deno.land:1234", +); + Deno.test( { permissions: { net: true } }, async function httpServerGetRequestBody() { @@ -536,7 +726,10 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { response, socket, } = Deno.upgradeWebSocket(request); - socket.onerror = () => fail(); + socket.onerror = (e) => { + console.error(e); + fail(); + }; socket.onmessage = (m) => { socket.send(m.data); socket.close(1001); @@ -553,7 +746,10 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { const def = deferred(); const ws = new WebSocket("ws://localhost:4501"); ws.onmessage = (m) => assertEquals(m.data, "foo"); - ws.onerror = () => fail(); + ws.onerror = (e) => { + console.error(e); + fail(); + }; ws.onclose = () => def.resolve(); ws.onopen = () => ws.send("foo"); @@ -562,6 +758,50 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { await server; }); +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketCanAccessRequest() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + socket.onerror = (e) => { + console.error(e); + fail(); + }; + socket.onmessage = (m) => { + socket.send(request.url.toString()); + socket.close(1001); + }; + return response; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new WebSocket("ws://localhost:4501"); + ws.onmessage = (m) => assertEquals(m.data, "http://localhost:4501/"); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + ws.onopen = () => ws.send("foo"); + + 
await def; + ac.abort(); + await server; + }, +); + Deno.test( { permissions: { net: true } }, async function httpVeryLargeRequest() { @@ -682,47 +922,46 @@ Deno.test( }, ); -// FIXME: auto request body reading is intefering with passing it as response. -// Deno.test( -// { permissions: { net: true } }, -// async function httpServerStreamDuplex() { -// const promise = deferred(); -// const ac = new AbortController(); +Deno.test( + { permissions: { net: true } }, + async function httpServerStreamDuplex() { + const promise = deferred(); + const ac = new AbortController(); -// const server = Deno.serve(request => { -// assert(request.body); + const server = Deno.serve((request) => { + assert(request.body); -// promise.resolve(); -// return new Response(request.body); -// }, { port: 2333, signal: ac.signal }); + promise.resolve(); + return new Response(request.body); + }, { port: 2333, signal: ac.signal }); -// const ts = new TransformStream(); -// const writable = ts.writable.getWriter(); + const ts = new TransformStream(); + const writable = ts.writable.getWriter(); -// const resp = await fetch("http://127.0.0.1:2333/", { -// method: "POST", -// body: ts.readable, -// }); + const resp = await fetch("http://127.0.0.1:2333/", { + method: "POST", + body: ts.readable, + }); -// await promise; -// assert(resp.body); -// const reader = resp.body.getReader(); -// await writable.write(new Uint8Array([1])); -// const chunk1 = await reader.read(); -// assert(!chunk1.done); -// assertEquals(chunk1.value, new Uint8Array([1])); -// await writable.write(new Uint8Array([2])); -// const chunk2 = await reader.read(); -// assert(!chunk2.done); -// assertEquals(chunk2.value, new Uint8Array([2])); -// await writable.close(); -// const chunk3 = await reader.read(); -// assert(chunk3.done); + await promise; + assert(resp.body); + const reader = resp.body.getReader(); + await writable.write(new Uint8Array([1])); + const chunk1 = await reader.read(); + assert(!chunk1.done); + 
assertEquals(chunk1.value, new Uint8Array([1])); + await writable.write(new Uint8Array([2])); + const chunk2 = await reader.read(); + assert(!chunk2.done); + assertEquals(chunk2.value, new Uint8Array([2])); + await writable.close(); + const chunk3 = await reader.read(); + assert(chunk3.done); -// ac.abort(); -// await server; -// }, -// ); + ac.abort(); + await server; + }, +); Deno.test( { permissions: { net: true } }, @@ -867,10 +1106,10 @@ Deno.test( let responseText = new TextDecoder("iso-8859-1").decode(buf); clientConn.close(); - assert(/\r\n[Xx]-[Hh]eader-[Tt]est: Æ\r\n/.test(responseText)); - ac.abort(); await server; + + assertMatch(responseText, /\r\n[Xx]-[Hh]eader-[Tt]est: Æ\r\n/); }, ); @@ -1355,12 +1594,11 @@ createServerLengthTest("autoResponseWithKnownLengthEmpty", { expects_con_len: true, }); -// FIXME: https://github.com/denoland/deno/issues/15892 -// createServerLengthTest("autoResponseWithUnknownLengthEmpty", { -// body: stream(""), -// expects_chunked: true, -// expects_con_len: false, -// }); +createServerLengthTest("autoResponseWithUnknownLengthEmpty", { + body: stream(""), + expects_chunked: true, + expects_con_len: false, +}); Deno.test( { permissions: { net: true } }, @@ -1841,6 +2079,7 @@ Deno.test( method: "GET", headers: { "connection": "close" }, }); + assertEquals(resp.status, 204); assertEquals(resp.headers.get("Content-Length"), null); } finally { ac.abort(); @@ -2162,11 +2401,11 @@ Deno.test( count++; return new Response(`hello world ${count}`); }, { - async onListen() { - const res1 = await fetch("http://localhost:9000/"); + async onListen({ port }: { port: number }) { + const res1 = await fetch(`http://localhost:${port}/`); assertEquals(await res1.text(), "hello world 1"); - const res2 = await fetch("http://localhost:9000/"); + const res2 = await fetch(`http://localhost:${port}/`); assertEquals(await res2.text(), "hello world 2"); promise.resolve(); @@ -2199,13 +2438,13 @@ Deno.test( return new Response("ok"); }, signal: 
ac.signal, - onListen: onListen(listeningPromise), + onListen: ({ port }: { port: number }) => listeningPromise.resolve(port), onError: createOnErrorCb(ac), }); try { - await listeningPromise; - const resp = await fetch("http://localhost:9000/", { + const port = await listeningPromise; + const resp = await fetch(`http://localhost:${port}/`, { headers: { connection: "close" }, method: "POST", body: '{"sus":true}', @@ -2238,8 +2477,8 @@ Deno.test( }, }), ), { - async onListen() { - const res1 = await fetch("http://localhost:9000/"); + async onListen({ port }) { + const res1 = await fetch(`http://localhost:${port}/`); assertEquals((await res1.text()).length, 40 * 50_000); promise.resolve(); diff --git a/cli/tests/unit/websocket_test.ts b/cli/tests/unit/websocket_test.ts index 997d8f0df6..999eede414 100644 --- a/cli/tests/unit/websocket_test.ts +++ b/cli/tests/unit/websocket_test.ts @@ -43,6 +43,22 @@ Deno.test(async function websocketPingPong() { ws.close(); }); +// TODO(mmastrac): This requires us to ignore bad certs +// Deno.test(async function websocketSecureConnect() { +// const promise = deferred(); +// const ws = new WebSocket("wss://localhost:4243/"); +// assertEquals(ws.url, "wss://localhost:4243/"); +// ws.onerror = (error) => { +// console.log(error); +// fail(); +// }; +// ws.onopen = () => ws.close(); +// ws.onclose = () => { +// promise.resolve(); +// }; +// await promise; +// }); + // https://github.com/denoland/deno/issues/18700 Deno.test( { sanitizeOps: false, sanitizeResources: false }, diff --git a/core/io.rs b/core/io.rs index 103fe79c1f..567d50bd48 100644 --- a/core/io.rs +++ b/core/io.rs @@ -3,6 +3,7 @@ use std::ops::Deref; use std::ops::DerefMut; +use bytes::Buf; use serde_v8::ZeroCopyBuf; /// BufView is a wrapper around an underlying contiguous chunk of bytes. 
It can @@ -26,11 +27,11 @@ enum BufViewInner { } impl BufView { - fn from_inner(inner: BufViewInner) -> Self { + const fn from_inner(inner: BufViewInner) -> Self { Self { inner, cursor: 0 } } - pub fn empty() -> Self { + pub const fn empty() -> Self { Self::from_inner(BufViewInner::Empty) } @@ -65,6 +66,20 @@ impl BufView { } } +impl Buf for BufView { + fn remaining(&self) -> usize { + self.len() + } + + fn chunk(&self) -> &[u8] { + self.deref() + } + + fn advance(&mut self, cnt: usize) { + self.advance_cursor(cnt) + } +} + impl Deref for BufView { type Target = [u8]; @@ -210,6 +225,20 @@ impl BufMutView { } } +impl Buf for BufMutView { + fn remaining(&self) -> usize { + self.len() + } + + fn chunk(&self) -> &[u8] { + self.deref() + } + + fn advance(&mut self, cnt: usize) { + self.advance_cursor(cnt) + } +} + impl Deref for BufMutView { type Target = [u8]; diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js new file mode 100644 index 0000000000..91bd360944 --- /dev/null +++ b/ext/http/00_serve.js @@ -0,0 +1,534 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+const core = globalThis.Deno.core; +const primordials = globalThis.__bootstrap.primordials; + +const { BadResourcePrototype } = core; +import { InnerBody } from "ext:deno_fetch/22_body.js"; +import { Event } from "ext:deno_web/02_event.js"; +import { + fromInnerResponse, + newInnerResponse, + toInnerResponse, +} from "ext:deno_fetch/23_response.js"; +import { fromInnerRequest } from "ext:deno_fetch/23_request.js"; +import { AbortController } from "ext:deno_web/03_abort_signal.js"; +import { + _eventLoop, + _idleTimeoutDuration, + _idleTimeoutTimeout, + _protocol, + _readyState, + _rid, + _role, + _server, + _serverHandleIdleTimeout, + SERVER, + WebSocket, +} from "ext:deno_websocket/01_websocket.js"; +import { + Deferred, + getReadableStreamResourceBacking, + readableStreamForRid, + ReadableStreamPrototype, +} from "ext:deno_web/06_streams.js"; +const { + ObjectPrototypeIsPrototypeOf, + SafeSet, + SafeSetIterator, + SetPrototypeAdd, + SetPrototypeDelete, + Symbol, + TypeError, + Uint8ArrayPrototype, + Uint8Array, +} = primordials; + +const _upgraded = Symbol("_upgraded"); + +function internalServerError() { + // "Internal Server Error" + return new Response( + new Uint8Array([ + 73, + 110, + 116, + 101, + 114, + 110, + 97, + 108, + 32, + 83, + 101, + 114, + 118, + 101, + 114, + 32, + 69, + 114, + 114, + 111, + 114, + ]), + { status: 500 }, + ); +} + +// Used to ensure that user returns a valid response (but not a different response) from handlers that are upgraded. 
+const UPGRADE_RESPONSE_SENTINEL = fromInnerResponse( + newInnerResponse(101), + "immutable", +); + +class InnerRequest { + #slabId; + #context; + #methodAndUri; + #streamRid; + #body; + #upgraded; + + constructor(slabId, context) { + this.#slabId = slabId; + this.#context = context; + this.#upgraded = false; + } + + close() { + if (this.#streamRid !== undefined) { + core.close(this.#streamRid); + this.#streamRid = undefined; + } + this.#slabId = undefined; + } + + get [_upgraded]() { + return this.#upgraded; + } + + _wantsUpgrade(upgradeType, ...originalArgs) { + // upgradeHttp is async + // TODO(mmastrac) + if (upgradeType == "upgradeHttp") { + throw "upgradeHttp is unavailable in Deno.serve at this time"; + } + + // upgradeHttpRaw is async + // TODO(mmastrac) + if (upgradeType == "upgradeHttpRaw") { + throw "upgradeHttp is unavailable in Deno.serve at this time"; + } + + // upgradeWebSocket is sync + if (upgradeType == "upgradeWebSocket") { + const response = originalArgs[0]; + const ws = originalArgs[1]; + + this.url(); + this.headerList; + this.close(); + + const goAhead = new Deferred(); + this.#upgraded = () => { + goAhead.resolve(); + }; + + // Start the upgrade in the background. 
+ (async () => { + try { + // Returns the connection and extra bytes, which we can pass directly to op_ws_server_create + const upgrade = await core.opAsync2( + "op_upgrade", + this.#slabId, + response.headerList, + ); + const wsRid = core.ops.op_ws_server_create(upgrade[0], upgrade[1]); + + // We have to wait for the go-ahead signal + await goAhead; + + ws[_rid] = wsRid; + ws[_readyState] = WebSocket.OPEN; + ws[_role] = SERVER; + const event = new Event("open"); + ws.dispatchEvent(event); + + ws[_eventLoop](); + if (ws[_idleTimeoutDuration]) { + ws.addEventListener( + "close", + () => clearTimeout(ws[_idleTimeoutTimeout]), + ); + } + ws[_serverHandleIdleTimeout](); + } catch (error) { + const event = new ErrorEvent("error", { error }); + ws.dispatchEvent(event); + } + })(); + return { response: UPGRADE_RESPONSE_SENTINEL, socket: ws }; + } + } + + url() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + // TODO(mmastrac): This is quite slow as we're serializing a large number of values. We may want to consider + // splitting this up into multiple ops. 
+ this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + } + + const path = this.#methodAndUri[2]; + + // * is valid for OPTIONS + if (path === "*") { + return "*"; + } + + // If the path is empty, return the authority (valid for CONNECT) + if (path == "") { + return this.#methodAndUri[1]; + } + + // CONNECT requires an authority + if (this.#methodAndUri[0] == "CONNECT") { + return this.#methodAndUri[1]; + } + + const hostname = this.#methodAndUri[1]; + if (hostname) { + // Construct a URL from the scheme, the hostname, and the path + return this.#context.scheme + hostname + path; + } + + // Construct a URL from the scheme, the fallback hostname, and the path + return this.#context.scheme + this.#context.fallbackHost + path; + } + + get remoteAddr() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + } + return { + transport: "tcp", + hostname: this.#methodAndUri[3], + port: this.#methodAndUri[4], + }; + } + + get method() { + if (this.#methodAndUri === undefined) { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + } + return this.#methodAndUri[0]; + } + + get body() { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + if (this.#body !== undefined) { + return this.#body; + } + // If the method is GET or HEAD, we do not want to include a body here, even if the Rust + // side of the code is willing to provide it to us. 
+ if (this.method == "GET" || this.method == "HEAD") { + this.#body = null; + return null; + } + this.#streamRid = core.ops.op_read_request_body(this.#slabId); + this.#body = new InnerBody(readableStreamForRid(this.#streamRid, false)); + return this.#body; + } + + get headerList() { + if (this.#slabId === undefined) { + throw new TypeError("request closed"); + } + return core.ops.op_get_request_headers(this.#slabId); + } + + get slabId() { + return this.#slabId; + } +} + +class CallbackContext { + scheme; + fallbackHost; + serverRid; + closed; + + initialize(args) { + this.serverRid = args[0]; + this.scheme = args[1]; + this.fallbackHost = args[2]; + this.closed = false; + } + + close() { + try { + this.closed = true; + core.tryClose(this.serverRid); + } catch { + // Pass + } + } +} + +function fastSyncResponseOrStream(req, respBody) { + if (respBody === null || respBody === undefined) { + // Don't set the body + return null; + } + + const stream = respBody.streamOrStatic; + const body = stream.body; + + if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, body)) { + core.ops.op_set_response_body_bytes(req, body); + return null; + } + + if (typeof body === "string") { + core.ops.op_set_response_body_text(req, body); + return null; + } + + // At this point in the response it needs to be a stream + if (!ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, stream)) { + throw TypeError("invalid response"); + } + const resourceBacking = getReadableStreamResourceBacking(stream); + if (resourceBacking) { + core.ops.op_set_response_body_resource( + req, + resourceBacking.rid, + resourceBacking.autoClose, + ); + return null; + } + + return stream; +} + +async function asyncResponse(responseBodies, req, status, stream) { + const responseRid = core.ops.op_set_response_body_stream(req); + SetPrototypeAdd(responseBodies, responseRid); + const reader = stream.getReader(); + core.ops.op_set_promise_complete(req, status); + try { + while (true) { + const { value, done } = await 
reader.read(); + if (done) { + break; + } + await core.writeAll(responseRid, value); + } + } catch (error) { + await reader.cancel(error); + } finally { + core.tryClose(responseRid); + SetPrototypeDelete(responseBodies, responseRid); + reader.releaseLock(); + } +} + +/** + * Maps the incoming request slab ID to a fully-fledged Request object, passes it to the user-provided + * callback, then extracts the response that was returned from that callback. The response is then pulled + * apart and handled on the Rust side. + * + * This function returns a promise that will only reject in the case of abnormal exit. + */ +function mapToCallback(responseBodies, context, signal, callback, onError) { + return async function (req) { + const innerRequest = new InnerRequest(req, context); + const request = fromInnerRequest(innerRequest, signal, "immutable"); + + // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback + // 500 error. + let response; + try { + response = await callback(request, { + remoteAddr: innerRequest.remoteAddr, + }); + } catch (error) { + try { + response = await onError(error); + } catch (error) { + console.error("Exception in onError while handling exception", error); + response = internalServerError(); + } + } + + const inner = toInnerResponse(response); + if (innerRequest[_upgraded]) { + // We're done here as the connection has been upgraded during the callback and no longer requires servicing. + if (response !== UPGRADE_RESPONSE_SENTINEL) { + console.error("Upgrade response was not returned from callback"); + context.close(); + } + innerRequest[_upgraded](); + return; + } + + // Did everything shut down while we were waiting? 
+ if (context.closed) { + innerRequest.close(); + return; + } + + const status = inner.status; + const headers = inner.headerList; + if (headers && headers.length > 0) { + if (headers.length == 1) { + core.ops.op_set_response_header(req, headers[0][0], headers[0][1]); + } else { + core.ops.op_set_response_headers(req, headers); + } + } + + // Attempt to response quickly to this request, otherwise extract the stream + const stream = fastSyncResponseOrStream(req, inner.body); + if (stream !== null) { + // Handle the stream asynchronously + await asyncResponse(responseBodies, req, status, stream); + } else { + core.ops.op_set_promise_complete(req, status); + } + + innerRequest.close(); + }; +} + +async function serve(arg1, arg2) { + let options = undefined; + let handler = undefined; + if (typeof arg1 === "function") { + handler = arg1; + options = arg2; + } else if (typeof arg2 === "function") { + handler = arg2; + options = arg1; + } else { + options = arg1; + } + if (handler === undefined) { + if (options === undefined) { + throw new TypeError( + "No handler was provided, so an options bag is mandatory.", + ); + } + handler = options.handler; + } + if (typeof handler !== "function") { + throw new TypeError("A handler function must be provided."); + } + if (options === undefined) { + options = {}; + } + + const wantsHttps = options.cert || options.key; + const signal = options.signal; + const onError = options.onError ?? function (error) { + console.error(error); + return internalServerError(); + }; + const listenOpts = { + hostname: options.hostname ?? "0.0.0.0", + port: options.port ?? (wantsHttps ? 9000 : 8000), + reusePort: options.reusePort ?? 
false, + }; + + const abortController = new AbortController(); + + const responseBodies = new SafeSet(); + const context = new CallbackContext(); + const callback = mapToCallback( + responseBodies, + context, + abortController.signal, + handler, + onError, + ); + + if (wantsHttps) { + if (!options.cert || !options.key) { + throw new TypeError( + "Both cert and key must be provided to enable HTTPS.", + ); + } + listenOpts.cert = options.cert; + listenOpts.key = options.key; + listenOpts.alpnProtocols = ["h2", "http/1.1"]; + const listener = Deno.listenTls(listenOpts); + listenOpts.port = listener.addr.port; + context.initialize(core.ops.op_serve_http( + listener.rid, + )); + } else { + const listener = Deno.listen(listenOpts); + listenOpts.port = listener.addr.port; + context.initialize(core.ops.op_serve_http( + listener.rid, + )); + } + + signal?.addEventListener( + "abort", + () => context.close(), + { once: true }, + ); + + const onListen = options.onListen ?? function ({ port }) { + // If the hostname is "0.0.0.0", we display "localhost" in console + // because browsers in Windows don't resolve "0.0.0.0". + // See the discussion in https://github.com/denoland/deno_std/issues/1165 + const hostname = listenOpts.hostname == "0.0.0.0" + ? 
"localhost" + : listenOpts.hostname; + console.log(`Listening on ${context.scheme}${hostname}:${port}/`); + }; + + onListen({ port: listenOpts.port }); + + while (true) { + const rid = context.serverRid; + let req; + try { + req = await core.opAsync("op_http_wait", rid); + } catch (error) { + if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { + break; + } + throw new Deno.errors.Http(error); + } + if (req === 0xffffffff) { + break; + } + callback(req).catch((error) => { + // Abnormal exit + console.error( + "Terminating Deno.serve loop due to unexpected error", + error, + ); + context.close(); + }); + } + + for (const streamRid of new SafeSetIterator(responseBodies)) { + core.tryClose(streamRid); + } +} + +export { serve }; diff --git a/ext/http/01_http.js b/ext/http/01_http.js index 5bfa58655e..95e2cee740 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -32,8 +32,8 @@ import { SERVER, WebSocket, } from "ext:deno_websocket/01_websocket.js"; -import { listen, TcpConn, UnixConn } from "ext:deno_net/01_net.js"; -import { listenTls, TlsConn } from "ext:deno_net/02_tls.js"; +import { TcpConn, UnixConn } from "ext:deno_net/01_net.js"; +import { TlsConn } from "ext:deno_net/02_tls.js"; import { Deferred, getReadableStreamResourceBacking, @@ -41,18 +41,17 @@ import { readableStreamForRid, ReadableStreamPrototype, } from "ext:deno_web/06_streams.js"; +import { serve } from "ext:deno_http/00_serve.js"; const { ArrayPrototypeIncludes, ArrayPrototypeMap, ArrayPrototypePush, Error, ObjectPrototypeIsPrototypeOf, - PromisePrototypeCatch, SafeSet, SafeSetIterator, SetPrototypeAdd, SetPrototypeDelete, - SetPrototypeClear, StringPrototypeCharCodeAt, StringPrototypeIncludes, StringPrototypeToLowerCase, @@ -406,6 +405,7 @@ const websocketCvf = buildCaseInsensitiveCommaValueFinder("websocket"); const upgradeCvf = buildCaseInsensitiveCommaValueFinder("upgrade"); function upgradeWebSocket(request, options = {}) { + const inner = toInnerRequest(request); const 
upgrade = request.headers.get("upgrade"); const upgradeHasWebSocketOption = upgrade !== null && websocketCvf(upgrade); @@ -455,25 +455,39 @@ function upgradeWebSocket(request, options = {}) { } } - const response = fromInnerResponse(r, "immutable"); - const socket = webidl.createBranded(WebSocket); setEventTargetData(socket); socket[_server] = true; - response[_ws] = socket; socket[_idleTimeoutDuration] = options.idleTimeout ?? 120; socket[_idleTimeoutTimeout] = null; + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeWebSocket", r, socket); + } + + const response = fromInnerResponse(r, "immutable"); + + response[_ws] = socket; + return { response, socket }; } function upgradeHttp(req) { + const inner = toInnerRequest(req); + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeHttp", arguments); + } + req[_deferred] = new Deferred(); return req[_deferred].promise; } async function upgradeHttpRaw(req, tcpConn) { const inner = toInnerRequest(req); + if (inner._wantsUpgrade) { + return inner._wantsUpgrade("upgradeHttpRaw", arguments); + } + const res = await core.opAsync("op_http_upgrade_early", inner[streamRid]); return new TcpConn(res, tcpConn.remoteAddr, tcpConn.localAddr); } @@ -552,233 +566,4 @@ function buildCaseInsensitiveCommaValueFinder(checkText) { internals.buildCaseInsensitiveCommaValueFinder = buildCaseInsensitiveCommaValueFinder; -function hostnameForDisplay(hostname) { - // If the hostname is "0.0.0.0", we display "localhost" in console - // because browsers in Windows don't resolve "0.0.0.0". - // See the discussion in https://github.com/denoland/deno_std/issues/1165 - return hostname === "0.0.0.0" ? 
"localhost" : hostname; -} - -async function respond(handler, requestEvent, connInfo, onError) { - let response; - - try { - response = await handler(requestEvent.request, connInfo); - - if (response.bodyUsed && response.body !== null) { - throw new TypeError("Response body already consumed."); - } - } catch (e) { - // Invoke `onError` handler if the request handler throws. - response = await onError(e); - } - - try { - // Send the response. - await requestEvent.respondWith(response); - } catch { - // `respondWith()` can throw for various reasons, including downstream and - // upstream connection errors, as well as errors thrown during streaming - // of the response content. In order to avoid false negatives, we ignore - // the error here and let `serveHttp` close the connection on the - // following iteration if it is in fact a downstream connection error. - } -} - -async function serveConnection( - server, - activeHttpConnections, - handler, - httpConn, - connInfo, - onError, -) { - while (!server.closed) { - let requestEvent = null; - - try { - // Yield the new HTTP request on the connection. - requestEvent = await httpConn.nextRequest(); - } catch { - // Connection has been closed. - break; - } - - if (requestEvent === null) { - break; - } - - respond(handler, requestEvent, connInfo, onError); - } - - SetPrototypeDelete(activeHttpConnections, httpConn); - try { - httpConn.close(); - } catch { - // Connection has already been closed. 
- } -} - -async function serve(arg1, arg2) { - let options = undefined; - let handler = undefined; - if (typeof arg1 === "function") { - handler = arg1; - options = arg2; - } else if (typeof arg2 === "function") { - handler = arg2; - options = arg1; - } else { - options = arg1; - } - if (handler === undefined) { - if (options === undefined) { - throw new TypeError( - "No handler was provided, so an options bag is mandatory.", - ); - } - handler = options.handler; - } - if (typeof handler !== "function") { - throw new TypeError("A handler function must be provided."); - } - if (options === undefined) { - options = {}; - } - - const signal = options.signal; - const onError = options.onError ?? function (error) { - console.error(error); - return new Response("Internal Server Error", { status: 500 }); - }; - const onListen = options.onListen ?? function ({ port }) { - console.log( - `Listening on http://${hostnameForDisplay(listenOpts.hostname)}:${port}/`, - ); - }; - const listenOpts = { - hostname: options.hostname ?? "127.0.0.1", - port: options.port ?? 9000, - reusePort: options.reusePort ?? false, - }; - - if (options.cert || options.key) { - if (!options.cert || !options.key) { - throw new TypeError( - "Both cert and key must be provided to enable HTTPS.", - ); - } - listenOpts.cert = options.cert; - listenOpts.key = options.key; - } - - let listener; - if (listenOpts.cert && listenOpts.key) { - listener = listenTls({ - hostname: listenOpts.hostname, - port: listenOpts.port, - cert: listenOpts.cert, - key: listenOpts.key, - reusePort: listenOpts.reusePort, - }); - } else { - listener = listen({ - hostname: listenOpts.hostname, - port: listenOpts.port, - reusePort: listenOpts.reusePort, - }); - } - - const serverDeferred = new Deferred(); - const activeHttpConnections = new SafeSet(); - - const server = { - transport: listenOpts.cert && listenOpts.key ? 
"https" : "http", - hostname: listenOpts.hostname, - port: listenOpts.port, - closed: false, - - close() { - if (server.closed) { - return; - } - server.closed = true; - try { - listener.close(); - } catch { - // Might have been already closed. - } - - for (const httpConn of new SafeSetIterator(activeHttpConnections)) { - try { - httpConn.close(); - } catch { - // Might have been already closed. - } - } - - SetPrototypeClear(activeHttpConnections); - serverDeferred.resolve(); - }, - - async serve() { - while (!server.closed) { - let conn; - - try { - conn = await listener.accept(); - } catch { - // Listener has been closed. - if (!server.closed) { - console.log("Listener has closed unexpectedly"); - } - break; - } - - let httpConn; - try { - const rid = ops.op_http_start(conn.rid); - httpConn = new HttpConn(rid, conn.remoteAddr, conn.localAddr); - } catch { - // Connection has been closed; - continue; - } - - SetPrototypeAdd(activeHttpConnections, httpConn); - - const connInfo = { - localAddr: conn.localAddr, - remoteAddr: conn.remoteAddr, - }; - // Serve the HTTP connection - serveConnection( - server, - activeHttpConnections, - handler, - httpConn, - connInfo, - onError, - ); - } - await serverDeferred.promise; - }, - }; - - signal?.addEventListener( - "abort", - () => { - try { - server.close(); - } catch { - // Pass - } - }, - { once: true }, - ); - - onListen(listener.addr); - - await PromisePrototypeCatch(server.serve(), console.error); -} - export { _ws, HttpConn, serve, upgradeHttp, upgradeHttpRaw, upgradeWebSocket }; diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 382fd3184f..bb965d9b25 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -10,6 +10,9 @@ readme = "README.md" repository.workspace = true description = "HTTP server implementation for Deno" +[features] +"__zombie_http_tracking" = [] + [lib] path = "lib.rs" @@ -24,11 +27,14 @@ brotli = "3.3.4" bytes.workspace = true cache_control.workspace = true deno_core.workspace = true 
+deno_net.workspace = true deno_websocket.workspace = true flate2.workspace = true fly-accept-encoding = "0.2.0" +http.workspace = true httparse.workspace = true hyper = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] } +hyper1 = { package = "hyper", features = ["full"], version = "1.0.0-rc.3" } memmem.workspace = true mime = "0.3.16" once_cell.workspace = true @@ -37,6 +43,8 @@ phf = { version = "0.10", features = ["macros"] } pin-project.workspace = true ring.workspace = true serde.workspace = true +slab.workspace = true +thiserror.workspace = true tokio.workspace = true tokio-util = { workspace = true, features = ["io"] } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs new file mode 100644 index 0000000000..25088e1ab0 --- /dev/null +++ b/ext/http/http_next.rs @@ -0,0 +1,765 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use crate::extract_network_stream; +use crate::request_body::HttpRequestBody; +use crate::request_properties::DefaultHttpRequestProperties; +use crate::request_properties::HttpConnectionProperties; +use crate::request_properties::HttpListenProperties; +use crate::request_properties::HttpPropertyExtractor; +use crate::response_body::CompletionHandle; +use crate::response_body::ResponseBytes; +use crate::response_body::ResponseBytesInner; +use crate::response_body::V8StreamHttpResponseBody; +use crate::LocalExecutor; +use deno_core::error::AnyError; +use deno_core::futures::TryFutureExt; +use deno_core::op; +use deno_core::AsyncRefCell; +use deno_core::BufView; +use deno_core::ByteString; +use deno_core::CancelFuture; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::ResourceId; +use deno_core::ZeroCopyBuf; +use deno_net::ops_tls::TlsStream; +use deno_net::raw::put_network_stream_resource; +use deno_net::raw::NetworkStream; +use deno_net::raw::NetworkStreamAddress; +use 
http::request::Parts; +use hyper1::body::Incoming; +use hyper1::header::COOKIE; +use hyper1::http::HeaderName; +use hyper1::http::HeaderValue; +use hyper1::server::conn::http1; +use hyper1::server::conn::http2; +use hyper1::service::service_fn; +use hyper1::upgrade::OnUpgrade; +use hyper1::StatusCode; +use pin_project::pin_project; +use pin_project::pinned_drop; +use slab::Slab; +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::io; +use std::net::Ipv4Addr; +use std::net::SocketAddr; +use std::net::SocketAddrV4; +use std::pin::Pin; +use std::rc::Rc; +use tokio::task::spawn_local; +use tokio::task::JoinHandle; + +type Request = hyper1::Request; +type Response = hyper1::Response; + +pub struct HttpSlabRecord { + request_info: HttpConnectionProperties, + request_parts: Parts, + request_body: Option, + // The response may get taken before we tear this down + response: Option, + body: Option>, + promise: CompletionHandle, + #[cfg(__zombie_http_tracking)] + alive: bool, +} + +thread_local! { + pub static SLAB: RefCell> = RefCell::new(Slab::with_capacity(1024)); +} + +/// Generates getters and setters for the [`SLAB`]. 
For example, +/// `with!(with_req, with_req_mut, Parts, http, http.request_parts);` expands to: +/// +/// ```ignore +/// #[inline(always)] +/// #[allow(dead_code)] +/// pub(crate) fn with_req_mut(key: usize, f: impl FnOnce(&mut Parts) -> T) -> T { +/// SLAB.with(|slab| { +/// let mut borrow = slab.borrow_mut(); +/// let mut http = borrow.get_mut(key).unwrap(); +/// #[cfg(__zombie_http_tracking)] +/// if !http.alive { +/// panic!("Attempted to access a dead HTTP object") +/// } +/// f(&mut http.expr) +/// }) +/// } + +/// #[inline(always)] +/// #[allow(dead_code)] +/// pub(crate) fn with_req(key: usize, f: impl FnOnce(&Parts) -> T) -> T { +/// SLAB.with(|slab| { +/// let mut borrow = slab.borrow(); +/// let mut http = borrow.get(key).unwrap(); +/// #[cfg(__zombie_http_tracking)] +/// if !http.alive { +/// panic!("Attempted to access a dead HTTP object") +/// } +/// f(&http.expr) +/// }) +/// } +/// ``` +macro_rules! with { + ($ref:ident, $mut:ident, $type:ty, $http:ident, $expr:expr) => { + #[inline(always)] + #[allow(dead_code)] + pub(crate) fn $mut(key: usize, f: impl FnOnce(&mut $type) -> T) -> T { + SLAB.with(|slab| { + let mut borrow = slab.borrow_mut(); + #[allow(unused_mut)] // TODO(mmastrac): compiler issue? 
+ let mut $http = borrow.get_mut(key).unwrap(); + #[cfg(__zombie_http_tracking)] + if !$http.alive { + panic!("Attempted to access a dead HTTP object") + } + f(&mut $expr) + }) + } + + #[inline(always)] + #[allow(dead_code)] + pub(crate) fn $ref(key: usize, f: impl FnOnce(&$type) -> T) -> T { + SLAB.with(|slab| { + let borrow = slab.borrow(); + let $http = borrow.get(key).unwrap(); + #[cfg(__zombie_http_tracking)] + if !$http.alive { + panic!("Attempted to access a dead HTTP object") + } + f(&$expr) + }) + } + }; +} + +with!(with_req, with_req_mut, Parts, http, http.request_parts); +with!( + with_req_body, + with_req_body_mut, + Option, + http, + http.request_body +); +with!( + with_resp, + with_resp_mut, + Option, + http, + http.response +); +with!( + with_body, + with_body_mut, + Option>, + http, + http.body +); +with!( + with_promise, + with_promise_mut, + CompletionHandle, + http, + http.promise +); +with!(with_http, with_http_mut, HttpSlabRecord, http, http); + +fn slab_insert( + request: Request, + request_info: HttpConnectionProperties, +) -> usize { + SLAB.with(|slab| { + let (request_parts, request_body) = request.into_parts(); + slab.borrow_mut().insert(HttpSlabRecord { + request_info, + request_parts, + request_body: Some(request_body), + response: Some(Response::new(ResponseBytes::default())), + body: None, + promise: CompletionHandle::default(), + #[cfg(__zombie_http_tracking)] + alive: true, + }) + }) +} + +#[op] +pub fn op_upgrade_raw(_index: usize) {} + +#[op] +pub async fn op_upgrade( + state: Rc>, + index: usize, + headers: Vec<(ByteString, ByteString)>, +) -> Result<(ResourceId, ZeroCopyBuf), AnyError> { + // Stage 1: set the respnse to 101 Switching Protocols and send it + let upgrade = with_http_mut(index, |http| { + // Manually perform the upgrade. 
We're peeking into hyper's underlying machinery here a bit + let upgrade = http.request_parts.extensions.remove::().unwrap(); + + let response = http.response.as_mut().unwrap(); + *response.status_mut() = StatusCode::SWITCHING_PROTOCOLS; + for (name, value) in headers { + response.headers_mut().append( + HeaderName::from_bytes(&name).unwrap(), + HeaderValue::from_bytes(&value).unwrap(), + ); + } + http.promise.complete(true); + upgrade + }); + + // Stage 2: wait for the request to finish upgrading + let upgraded = upgrade.await?; + + // Stage 3: return the extracted raw network stream + let (stream, bytes) = extract_network_stream(upgraded); + + // We're allocating for those extra bytes, but they are probably going to be empty most of the time + Ok(( + put_network_stream_resource( + &mut state.borrow_mut().resource_table, + stream, + )?, + ZeroCopyBuf::from(bytes.to_vec()), + )) +} + +#[op] +pub fn op_set_promise_complete(index: usize, status: u16) { + with_resp_mut(index, |resp| { + // The Javascript code will never provide a status that is invalid here (see 23_response.js) + *resp.as_mut().unwrap().status_mut() = + StatusCode::from_u16(status).unwrap(); + }); + with_promise_mut(index, |promise| { + promise.complete(true); + }); +} + +#[op] +pub fn op_get_request_method_and_url( + index: usize, +) -> (String, Option, String, String, Option) { + // TODO(mmastrac): Passing method can be optimized + with_http(index, |http| { + let request_properties = DefaultHttpRequestProperties::request_properties( + &http.request_info, + &http.request_parts.uri, + &http.request_parts.headers, + ); + + // Only extract the path part - we handle authority elsewhere + let path = match &http.request_parts.uri.path_and_query() { + Some(path_and_query) => path_and_query.to_string(), + None => "".to_owned(), + }; + + ( + http.request_parts.method.as_str().to_owned(), + request_properties.authority, + path, + String::from(http.request_info.peer_address.as_ref()), + 
http.request_info.peer_port, + ) + }) +} + +#[op] +pub fn op_get_request_header(index: usize, name: String) -> Option { + with_req(index, |req| { + let value = req.headers.get(name); + value.map(|value| value.as_bytes().into()) + }) +} + +#[op] +pub fn op_get_request_headers(index: usize) -> Vec<(ByteString, ByteString)> { + with_req(index, |req| { + let headers = &req.headers; + let mut vec = Vec::with_capacity(headers.len()); + let mut cookies: Option> = None; + for (name, value) in headers { + if name == COOKIE { + if let Some(ref mut cookies) = cookies { + cookies.push(value.as_bytes()); + } else { + cookies = Some(vec![value.as_bytes()]); + } + } else { + let name: &[u8] = name.as_ref(); + vec.push((name.into(), value.as_bytes().into())) + } + } + + // We treat cookies specially, because we don't want them to get them + // mangled by the `Headers` object in JS. What we do is take all cookie + // headers and concat them into a single cookie header, separated by + // semicolons. + // TODO(mmastrac): This should probably happen on the JS side on-demand + if let Some(cookies) = cookies { + let cookie_sep = "; ".as_bytes(); + vec.push(( + ByteString::from(COOKIE.as_str()), + ByteString::from(cookies.join(cookie_sep)), + )); + } + vec + }) +} + +#[op] +pub fn op_read_request_body(state: &mut OpState, index: usize) -> ResourceId { + let incoming = with_req_body_mut(index, |body| body.take().unwrap()); + let body_resource = Rc::new(HttpRequestBody::new(incoming)); + let res = state.resource_table.add_rc(body_resource.clone()); + with_body_mut(index, |body| { + *body = Some(body_resource); + }); + res +} + +#[op] +pub fn op_set_response_header( + index: usize, + name: ByteString, + value: ByteString, +) { + with_resp_mut(index, |resp| { + let resp_headers = resp.as_mut().unwrap().headers_mut(); + // These are valid latin-1 strings + let name = HeaderName::from_bytes(&name).unwrap(); + let value = HeaderValue::from_bytes(&value).unwrap(); + resp_headers.append(name, 
value); + }); +} + +#[op] +pub fn op_set_response_headers( + index: usize, + headers: Vec<(ByteString, ByteString)>, +) { + // TODO(mmastrac): Invalid headers should be handled? + with_resp_mut(index, |resp| { + let resp_headers = resp.as_mut().unwrap().headers_mut(); + resp_headers.reserve(headers.len()); + for (name, value) in headers { + // These are valid latin-1 strings + let name = HeaderName::from_bytes(&name).unwrap(); + let value = HeaderValue::from_bytes(&value).unwrap(); + resp_headers.append(name, value); + } + }) +} + +#[op] +pub fn op_set_response_body_resource( + state: &mut OpState, + index: usize, + stream_rid: ResourceId, + auto_close: bool, +) -> Result<(), AnyError> { + // If the stream is auto_close, we will hold the last ref to it until the response is complete. + let resource = if auto_close { + state.resource_table.take_any(stream_rid)? + } else { + state.resource_table.get_any(stream_rid)? + }; + + with_resp_mut(index, move |response| { + let future = resource.clone().read(64 * 1024); + response + .as_mut() + .unwrap() + .body_mut() + .initialize(ResponseBytesInner::Resource(auto_close, resource, future)); + }); + + Ok(()) +} + +#[op] +pub fn op_set_response_body_stream( + state: &mut OpState, + index: usize, +) -> Result { + // TODO(mmastrac): what should this channel size be? 
+ let (tx, rx) = tokio::sync::mpsc::channel(1); + let (tx, rx) = ( + V8StreamHttpResponseBody::new(tx), + ResponseBytesInner::V8Stream(rx), + ); + + with_resp_mut(index, move |response| { + response.as_mut().unwrap().body_mut().initialize(rx); + }); + + Ok(state.resource_table.add(tx)) +} + +#[op] +pub fn op_set_response_body_text(index: usize, text: String) { + if !text.is_empty() { + with_resp_mut(index, move |response| { + response + .as_mut() + .unwrap() + .body_mut() + .initialize(ResponseBytesInner::Bytes(BufView::from(text.into_bytes()))) + }); + } +} + +#[op] +pub fn op_set_response_body_bytes(index: usize, buffer: ZeroCopyBuf) { + if !buffer.is_empty() { + with_resp_mut(index, |response| { + response + .as_mut() + .unwrap() + .body_mut() + .initialize(ResponseBytesInner::Bytes(BufView::from(buffer))) + }); + }; +} + +#[op] +pub async fn op_http_track( + state: Rc>, + index: usize, + server_rid: ResourceId, +) -> Result<(), AnyError> { + let handle = with_resp(index, |resp| { + resp.as_ref().unwrap().body().completion_handle() + }); + + let join_handle = state + .borrow_mut() + .resource_table + .get::(server_rid)?; + + match handle.or_cancel(join_handle.cancel_handle()).await { + Ok(true) => Ok(()), + Ok(false) => { + Err(AnyError::msg("connection closed before message completed")) + } + Err(_e) => Ok(()), + } +} + +#[pin_project(PinnedDrop)] +pub struct SlabFuture>(usize, #[pin] F); + +pub fn new_slab_future( + request: Request, + request_info: HttpConnectionProperties, + tx: tokio::sync::mpsc::Sender, +) -> SlabFuture> { + let index = slab_insert(request, request_info); + let rx = with_promise(index, |promise| promise.clone()); + SlabFuture(index, async move { + if tx.send(index).await.is_ok() { + // We only need to wait for completion if we aren't closed + rx.await; + } + }) +} + +impl> SlabFuture {} + +#[pinned_drop] +impl> PinnedDrop for SlabFuture { + fn drop(self: Pin<&mut Self>) { + SLAB.with(|slab| { + #[cfg(__zombie_http_tracking)] + { + 
slab.borrow_mut().get_mut(self.0).unwrap().alive = false; + } + #[cfg(not(__zombie_http_tracking))] + { + slab.borrow_mut().remove(self.0); + } + }); + } +} + +impl> Future for SlabFuture { + type Output = Result; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let index = self.0; + self + .project() + .1 + .poll(cx) + .map(|_| Ok(with_resp_mut(index, |resp| resp.take().unwrap()))) + } +} + +fn serve_https( + mut io: TlsStream, + request_info: HttpConnectionProperties, + cancel: RcRef, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> { + // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us + let svc = service_fn(move |req: Request| { + new_slab_future(req, request_info.clone(), tx.clone()) + }); + spawn_local(async { + io.handshake().await?; + let handshake = io.get_ref().1.alpn_protocol(); + // h2 + if handshake == Some(&[104, 50]) { + let conn = http2::Builder::new(LocalExecutor).serve_connection(io, svc); + + conn.map_err(AnyError::from).try_or_cancel(cancel).await + } else { + let conn = http1::Builder::new() + .keep_alive(true) + .serve_connection(io, svc); + + conn + .with_upgrades() + .map_err(AnyError::from) + .try_or_cancel(cancel) + .await + } + }) +} + +fn serve_http( + io: impl tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, + request_info: HttpConnectionProperties, + cancel: RcRef, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> { + // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us + let svc = service_fn(move |req: Request| { + new_slab_future(req, request_info.clone(), tx.clone()) + }); + spawn_local(async { + let conn = http1::Builder::new() + .keep_alive(true) + .serve_connection(io, svc); + conn + .with_upgrades() + .map_err(AnyError::from) + .try_or_cancel(cancel) + .await + }) +} + +fn serve_http_on( + network_stream: NetworkStream, + listen_properties: &HttpListenProperties, + cancel: 
RcRef, + tx: tokio::sync::mpsc::Sender, +) -> JoinHandle> { + // We always want some sort of peer address. If we can't get one, just make up one. + let peer_address = network_stream.peer_address().unwrap_or_else(|_| { + NetworkStreamAddress::Ip(SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(0, 0, 0, 0), + 0, + ))) + }); + let connection_properties: HttpConnectionProperties = + DefaultHttpRequestProperties::connection_properties( + listen_properties, + &peer_address, + ); + + match network_stream { + NetworkStream::Tcp(conn) => { + serve_http(conn, connection_properties, cancel, tx) + } + NetworkStream::Tls(conn) => { + serve_https(conn, connection_properties, cancel, tx) + } + #[cfg(unix)] + NetworkStream::Unix(conn) => { + serve_http(conn, connection_properties, cancel, tx) + } + } +} + +struct HttpJoinHandle( + AsyncRefCell>>>, + CancelHandle, + AsyncRefCell>, +); + +impl HttpJoinHandle { + fn cancel_handle(self: &Rc) -> RcRef { + RcRef::map(self, |this| &this.1) + } +} + +impl Resource for HttpJoinHandle { + fn name(&self) -> Cow { + "http".into() + } + + fn close(self: Rc) { + self.1.cancel() + } +} + +#[op(v8)] +pub fn op_serve_http( + state: Rc>, + listener_rid: ResourceId, +) -> Result<(ResourceId, &'static str, String), AnyError> { + let listener = + DefaultHttpRequestProperties::get_network_stream_listener_for_rid( + &mut state.borrow_mut(), + listener_rid, + )?; + + let local_address = listener.listen_address()?; + let listen_properties = DefaultHttpRequestProperties::listen_properties( + listener.stream(), + &local_address, + ); + + let (tx, rx) = tokio::sync::mpsc::channel(10); + let resource: Rc = Rc::new(HttpJoinHandle( + AsyncRefCell::new(None), + CancelHandle::new(), + AsyncRefCell::new(rx), + )); + let cancel_clone = resource.cancel_handle(); + + let listen_properties_clone = listen_properties.clone(); + let handle = spawn_local(async move { + loop { + let conn = listener + .accept() + .try_or_cancel(cancel_clone.clone()) + .await?; + 
serve_http_on( + conn, + &listen_properties_clone, + cancel_clone.clone(), + tx.clone(), + ); + } + #[allow(unreachable_code)] + Ok::<_, AnyError>(()) + }); + + // Set the handle after we start the future + *RcRef::map(&resource, |this| &this.0) + .try_borrow_mut() + .unwrap() = Some(handle); + + Ok(( + state.borrow_mut().resource_table.add_rc(resource), + listen_properties.scheme, + listen_properties.fallback_host, + )) +} + +#[op(v8)] +pub fn op_serve_http_on( + state: Rc>, + conn: ResourceId, +) -> Result<(ResourceId, &'static str, String), AnyError> { + let network_stream = + DefaultHttpRequestProperties::get_network_stream_for_rid( + &mut state.borrow_mut(), + conn, + )?; + + let local_address = network_stream.local_address()?; + let listen_properties = DefaultHttpRequestProperties::listen_properties( + network_stream.stream(), + &local_address, + ); + + let (tx, rx) = tokio::sync::mpsc::channel(10); + let resource: Rc = Rc::new(HttpJoinHandle( + AsyncRefCell::new(None), + CancelHandle::new(), + AsyncRefCell::new(rx), + )); + + let handle = serve_http_on( + network_stream, + &listen_properties, + resource.cancel_handle(), + tx, + ); + + // Set the handle after we start the future + *RcRef::map(&resource, |this| &this.0) + .try_borrow_mut() + .unwrap() = Some(handle); + + Ok(( + state.borrow_mut().resource_table.add_rc(resource), + listen_properties.scheme, + listen_properties.fallback_host, + )) +} + +#[op] +pub async fn op_http_wait( + state: Rc>, + rid: ResourceId, +) -> Result { + // We will get the join handle initially, as we might be consuming requests still + let join_handle = state + .borrow_mut() + .resource_table + .get::(rid)?; + + let cancel = join_handle.clone().cancel_handle(); + let next = async { + let mut recv = RcRef::map(&join_handle, |this| &this.2).borrow_mut().await; + recv.recv().await + } + .or_cancel(cancel) + .unwrap_or_else(|_| None) + .await; + + // Do we have a request? 
+ if let Some(req) = next { + return Ok(req as u32); + } + + // No - we're shutting down + let res = RcRef::map(join_handle, |this| &this.0) + .borrow_mut() + .await + .take() + .unwrap() + .await?; + + // Drop the cancel and join handles + state + .borrow_mut() + .resource_table + .take::(rid)?; + + // Filter out shutdown (ENOTCONN) errors + if let Err(err) = res { + if let Some(err) = err.source() { + if let Some(err) = err.downcast_ref::() { + if err.kind() == io::ErrorKind::NotConnected { + return Ok(u32::MAX); + } + } + } + return Err(err); + } + + Ok(u32::MAX) +} diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 43e3c130aa..561b13885d 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -34,6 +34,7 @@ use deno_core::ResourceId; use deno_core::StringOrBuffer; use deno_core::WriteOutcome; use deno_core::ZeroCopyBuf; +use deno_net::raw::NetworkStream; use deno_websocket::ws_create_server_stream; use flate2::write::GzEncoder; use flate2::Compression; @@ -76,7 +77,11 @@ use crate::reader_stream::ExternallyAbortableReaderStream; use crate::reader_stream::ShutdownHandle; pub mod compressible; +mod http_next; mod reader_stream; +mod request_body; +mod request_properties; +mod response_body; mod websocket_upgrade; deno_core::extension!( @@ -92,8 +97,25 @@ deno_core::extension!( op_http_websocket_accept_header, op_http_upgrade_early, op_http_upgrade_websocket, + http_next::op_serve_http, + http_next::op_serve_http_on, + http_next::op_http_wait, + http_next::op_http_track, + http_next::op_set_response_header, + http_next::op_set_response_headers, + http_next::op_set_response_body_text, + http_next::op_set_promise_complete, + http_next::op_set_response_body_bytes, + http_next::op_set_response_body_resource, + http_next::op_set_response_body_stream, + http_next::op_get_request_header, + http_next::op_get_request_headers, + http_next::op_get_request_method_and_url, + http_next::op_read_request_body, + http_next::op_upgrade, + http_next::op_upgrade_raw, ], - esm = 
["01_http.js"], + esm = ["00_serve.js", "01_http.js"], ); pub enum HttpSocketAddr { @@ -1147,8 +1169,10 @@ async fn op_http_upgrade_websocket( } }; - let transport = hyper::upgrade::on(request).await?; - let ws_rid = ws_create_server_stream(&state, transport).await?; + let (transport, bytes) = + extract_network_stream(hyper::upgrade::on(request).await?); + let ws_rid = + ws_create_server_stream(&mut state.borrow_mut(), transport, bytes)?; Ok(ws_rid) } @@ -1166,6 +1190,16 @@ where } } +impl hyper1::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + spawn_local(fut); + } +} + fn http_error(message: &'static str) -> AnyError { custom_error("Http", message) } @@ -1192,3 +1226,47 @@ fn filter_enotconn( fn never() -> Pending { pending() } + +trait CanDowncastUpgrade: Sized { + fn downcast( + self, + ) -> Result<(T, Bytes), Self>; +} + +impl CanDowncastUpgrade for hyper1::upgrade::Upgraded { + fn downcast( + self, + ) -> Result<(T, Bytes), Self> { + let hyper1::upgrade::Parts { io, read_buf, .. } = self.downcast()?; + Ok((io, read_buf)) + } +} + +impl CanDowncastUpgrade for hyper::upgrade::Upgraded { + fn downcast( + self, + ) -> Result<(T, Bytes), Self> { + let hyper::upgrade::Parts { io, read_buf, .. 
} = self.downcast()?; + Ok((io, read_buf)) + } +} + +fn extract_network_stream( + upgraded: U, +) -> (NetworkStream, Bytes) { + let upgraded = match upgraded.downcast::() { + Ok((stream, bytes)) => return (NetworkStream::Tcp(stream), bytes), + Err(x) => x, + }; + let upgraded = match upgraded.downcast::() { + Ok((stream, bytes)) => return (NetworkStream::Tls(stream), bytes), + Err(x) => x, + }; + #[cfg(unix)] + let upgraded = match upgraded.downcast::() { + Ok((stream, bytes)) => return (NetworkStream::Unix(stream), bytes), + Err(x) => x, + }; + drop(upgraded); + unreachable!("unexpected stream type"); +} diff --git a/ext/http/request_body.rs b/ext/http/request_body.rs new file mode 100644 index 0000000000..73908ca55d --- /dev/null +++ b/ext/http/request_body.rs @@ -0,0 +1,84 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use bytes::Bytes; +use deno_core::error::AnyError; +use deno_core::futures::stream::Peekable; +use deno_core::futures::Stream; +use deno_core::futures::StreamExt; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::RcRef; +use deno_core::Resource; +use hyper1::body::Body; +use hyper1::body::Incoming; +use hyper1::body::SizeHint; +use std::borrow::Cow; +use std::pin::Pin; +use std::rc::Rc; + +/// Converts a hyper incoming body stream into a stream of [`Bytes`] that we can use to read in V8. 
+struct ReadFuture(Incoming); + +impl Stream for ReadFuture { + type Item = Result; + + fn poll_next( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + let res = Pin::new(&mut self.get_mut().0).poll_frame(cx); + match res { + std::task::Poll::Ready(Some(Ok(frame))) => { + if let Ok(data) = frame.into_data() { + // Ensure that we never yield an empty frame + if !data.is_empty() { + return std::task::Poll::Ready(Some(Ok(data))); + } + } + } + std::task::Poll::Ready(None) => return std::task::Poll::Ready(None), + _ => {} + } + std::task::Poll::Pending + } +} + +pub struct HttpRequestBody(AsyncRefCell>, SizeHint); + +impl HttpRequestBody { + pub fn new(body: Incoming) -> Self { + let size_hint = body.size_hint(); + Self(AsyncRefCell::new(ReadFuture(body).peekable()), size_hint) + } + + async fn read(self: Rc, limit: usize) -> Result { + let peekable = RcRef::map(self, |this| &this.0); + let mut peekable = peekable.borrow_mut().await; + match Pin::new(&mut *peekable).peek_mut().await { + None => Ok(BufView::empty()), + Some(Err(_)) => Err(peekable.next().await.unwrap().err().unwrap()), + Some(Ok(bytes)) => { + if bytes.len() <= limit { + // We can safely take the next item since we peeked it + return Ok(BufView::from(peekable.next().await.unwrap()?)); + } + let ret = bytes.split_to(limit); + Ok(BufView::from(ret)) + } + } + } +} + +impl Resource for HttpRequestBody { + fn name(&self) -> Cow { + "requestBody".into() + } + + fn read(self: Rc, limit: usize) -> AsyncResult { + Box::pin(HttpRequestBody::read(self, limit)) + } + + fn size_hint(&self) -> (u64, Option) { + (self.1.lower(), self.1.upper()) + } +} diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs new file mode 100644 index 0000000000..7a7f5219c0 --- /dev/null +++ b/ext/http/request_properties.rs @@ -0,0 +1,249 @@ +use deno_core::error::AnyError; +use deno_core::OpState; +use deno_core::ResourceId; +use deno_net::raw::NetworkStream; +// Copyright 
2018-2023 the Deno authors. All rights reserved. MIT license. +use deno_net::raw::take_network_stream_listener_resource; +use deno_net::raw::take_network_stream_resource; +use deno_net::raw::NetworkStreamAddress; +use deno_net::raw::NetworkStreamListener; +use deno_net::raw::NetworkStreamType; +use hyper::HeaderMap; +use hyper::Uri; +use hyper1::header::HOST; +use std::borrow::Cow; +use std::rc::Rc; + +// TODO(mmastrac): I don't like that we have to clone this, but it's one-time setup +#[derive(Clone)] +pub struct HttpListenProperties { + pub stream_type: NetworkStreamType, + pub scheme: &'static str, + pub fallback_host: String, + pub local_port: Option, +} + +#[derive(Clone)] +pub struct HttpConnectionProperties { + pub stream_type: NetworkStreamType, + pub peer_address: Rc, + pub peer_port: Option, + pub local_port: Option, +} + +pub struct HttpRequestProperties { + pub authority: Option, +} + +/// Pluggable trait to determine listen, connection and request properties +/// for embedders that wish to provide alternative routes for incoming HTTP. +pub trait HttpPropertyExtractor { + /// Given a listener [`ResourceId`], returns the [`NetworkStreamListener`]. + fn get_network_stream_listener_for_rid( + state: &mut OpState, + listener_rid: ResourceId, + ) -> Result; + + /// Given a connection [`ResourceId`], returns the [`NetworkStream`]. + fn get_network_stream_for_rid( + state: &mut OpState, + rid: ResourceId, + ) -> Result; + + /// Determines the listener properties. + fn listen_properties( + stream_type: NetworkStreamType, + local_address: &NetworkStreamAddress, + ) -> HttpListenProperties; + + /// Determines the connection properties. + fn connection_properties( + listen_properties: &HttpListenProperties, + peer_address: &NetworkStreamAddress, + ) -> HttpConnectionProperties; + + /// Determines the request properties. 
+ fn request_properties( + connection_properties: &HttpConnectionProperties, + uri: &Uri, + headers: &HeaderMap, + ) -> HttpRequestProperties; +} + +pub struct DefaultHttpRequestProperties {} + +impl HttpPropertyExtractor for DefaultHttpRequestProperties { + fn get_network_stream_for_rid( + state: &mut OpState, + rid: ResourceId, + ) -> Result { + take_network_stream_resource(&mut state.resource_table, rid) + } + + fn get_network_stream_listener_for_rid( + state: &mut OpState, + listener_rid: ResourceId, + ) -> Result { + take_network_stream_listener_resource( + &mut state.resource_table, + listener_rid, + ) + } + + fn listen_properties( + stream_type: NetworkStreamType, + local_address: &NetworkStreamAddress, + ) -> HttpListenProperties { + let scheme = req_scheme_from_stream_type(stream_type); + let fallback_host = req_host_from_addr(stream_type, local_address); + let local_port: Option = match local_address { + NetworkStreamAddress::Ip(ip) => Some(ip.port()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => None, + }; + + HttpListenProperties { + scheme, + fallback_host, + local_port, + stream_type, + } + } + + fn connection_properties( + listen_properties: &HttpListenProperties, + peer_address: &NetworkStreamAddress, + ) -> HttpConnectionProperties { + let peer_port: Option = match peer_address { + NetworkStreamAddress::Ip(ip) => Some(ip.port()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => None, + }; + let peer_address = match peer_address { + NetworkStreamAddress::Ip(addr) => Rc::from(addr.ip().to_string()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => Rc::from("unix"), + }; + let local_port = listen_properties.local_port; + let stream_type = listen_properties.stream_type; + + HttpConnectionProperties { + stream_type, + peer_address, + peer_port, + local_port, + } + } + + fn request_properties( + connection_properties: &HttpConnectionProperties, + uri: &Uri, + headers: &HeaderMap, + ) -> HttpRequestProperties { + let authority = req_host( + uri, + 
headers, + connection_properties.stream_type, + connection_properties.local_port.unwrap_or_default(), + ) + .map(|s| s.into_owned()); + + HttpRequestProperties { authority } + } +} + +/// Compute the fallback address from the [`NetworkStreamListenAddress`]. If the request has no authority/host in +/// its URI, and there is no [`HeaderName::HOST`] header, we fall back to this. +fn req_host_from_addr( + stream_type: NetworkStreamType, + addr: &NetworkStreamAddress, +) -> String { + match addr { + NetworkStreamAddress::Ip(addr) => { + if (stream_type == NetworkStreamType::Tls && addr.port() == 443) + || (stream_type == NetworkStreamType::Tcp && addr.port() == 80) + { + if addr.ip().is_loopback() || addr.ip().is_unspecified() { + return "localhost".to_owned(); + } + addr.ip().to_string() + } else { + if addr.ip().is_loopback() || addr.ip().is_unspecified() { + return format!("localhost:{}", addr.port()); + } + addr.to_string() + } + } + // There is no standard way for unix domain socket URLs + // nginx and nodejs request use http://unix:[socket_path]:/ but it is not a valid URL + // httpie uses http+unix://[percent_encoding_of_path]/ which we follow + #[cfg(unix)] + NetworkStreamAddress::Unix(unix) => percent_encoding::percent_encode( + unix + .as_pathname() + .and_then(|x| x.to_str()) + .unwrap_or_default() + .as_bytes(), + percent_encoding::NON_ALPHANUMERIC, + ) + .to_string(), + } +} + +fn req_scheme_from_stream_type(stream_type: NetworkStreamType) -> &'static str { + match stream_type { + NetworkStreamType::Tcp => "http://", + NetworkStreamType::Tls => "https://", + #[cfg(unix)] + NetworkStreamType::Unix => "http+unix://", + } +} + +fn req_host<'a>( + uri: &'a Uri, + headers: &'a HeaderMap, + addr_type: NetworkStreamType, + port: u16, +) -> Option> { + // Unix sockets always use the socket address + #[cfg(unix)] + if addr_type == NetworkStreamType::Unix { + return None; + } + + // It is rare that an authority will be passed, but if it does, it takes priority + if 
let Some(auth) = uri.authority() { + match addr_type { + NetworkStreamType::Tcp => { + if port == 80 { + return Some(Cow::Borrowed(auth.host())); + } + } + NetworkStreamType::Tls => { + if port == 443 { + return Some(Cow::Borrowed(auth.host())); + } + } + #[cfg(unix)] + NetworkStreamType::Unix => {} + } + return Some(Cow::Borrowed(auth.as_str())); + } + + // TODO(mmastrac): Most requests will use this path and we probably will want to optimize it in the future + if let Some(host) = headers.get(HOST) { + return Some(match host.to_str() { + Ok(host) => Cow::Borrowed(host), + Err(_) => Cow::Owned( + host + .as_bytes() + .iter() + .cloned() + .map(char::from) + .collect::(), + ), + }); + } + + None +} diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs new file mode 100644 index 0000000000..0086e4d782 --- /dev/null +++ b/ext/http/response_body.rs @@ -0,0 +1,253 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; +use std::cell::RefCell; +use std::future::Future; +use std::pin::Pin; +use std::rc::Rc; +use std::task::Waker; + +use deno_core::error::bad_resource; +use deno_core::error::AnyError; +use deno_core::futures::FutureExt; +use deno_core::AsyncRefCell; +use deno_core::AsyncResult; +use deno_core::BufView; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; +use deno_core::RcRef; +use deno_core::Resource; +use deno_core::WriteOutcome; +use hyper1::body::Body; +use hyper1::body::Frame; +use hyper1::body::SizeHint; + +#[derive(Clone, Debug, Default)] +pub struct CompletionHandle { + inner: Rc>, +} + +#[derive(Debug, Default)] +struct CompletionHandleInner { + complete: bool, + success: bool, + waker: Option, +} + +impl CompletionHandle { + pub fn complete(&self, success: bool) { + let mut mut_self = self.inner.borrow_mut(); + mut_self.complete = true; + mut_self.success = success; + if let Some(waker) = mut_self.waker.take() { + drop(mut_self); + waker.wake(); + } + } +} + +impl Future for 
CompletionHandle { + type Output = bool; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let mut mut_self = self.inner.borrow_mut(); + if mut_self.complete { + return std::task::Poll::Ready(mut_self.success); + } + + mut_self.waker = Some(cx.waker().clone()); + std::task::Poll::Pending + } +} + +#[derive(Default)] +pub enum ResponseBytesInner { + /// An empty stream. + #[default] + Empty, + /// A completed stream. + Done, + /// A static buffer of bytes, sent it one fell swoop. + Bytes(BufView), + /// A resource stream, piped in fast mode. + Resource(bool, Rc, AsyncResult), + /// A JS-backed stream, written in JS and transported via pipe. + V8Stream(tokio::sync::mpsc::Receiver), +} + +impl std::fmt::Debug for ResponseBytesInner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Done => f.write_str("Done"), + Self::Empty => f.write_str("Empty"), + Self::Bytes(..) => f.write_str("Bytes"), + Self::Resource(..) => f.write_str("Resource"), + Self::V8Stream(..) => f.write_str("V8Stream"), + } + } +} + +/// This represents the union of possible response types in Deno with the stream-style [`Body`] interface +/// required by hyper. As the API requires information about request completion (including a success/fail +/// flag), we include a very lightweight [`CompletionHandle`] for interested parties to listen on. 
+#[derive(Debug, Default)] +pub struct ResponseBytes(ResponseBytesInner, CompletionHandle); + +impl ResponseBytes { + pub fn initialize(&mut self, inner: ResponseBytesInner) { + debug_assert!(matches!(self.0, ResponseBytesInner::Empty)); + self.0 = inner; + } + + pub fn completion_handle(&self) -> CompletionHandle { + self.1.clone() + } + + fn complete(&mut self, success: bool) -> ResponseBytesInner { + if matches!(self.0, ResponseBytesInner::Done) { + return ResponseBytesInner::Done; + } + + let current = std::mem::replace(&mut self.0, ResponseBytesInner::Done); + self.1.complete(success); + current + } +} + +impl ResponseBytesInner { + pub fn size_hint(&self) -> SizeHint { + match self { + Self::Done => SizeHint::with_exact(0), + Self::Empty => SizeHint::with_exact(0), + Self::Bytes(bytes) => SizeHint::with_exact(bytes.len() as u64), + Self::Resource(_, res, _) => { + let hint = res.size_hint(); + let mut size_hint = SizeHint::new(); + size_hint.set_lower(hint.0); + if let Some(upper) = hint.1 { + size_hint.set_upper(upper) + } + size_hint + } + Self::V8Stream(..) => SizeHint::default(), + } + } +} + +impl Body for ResponseBytes { + type Data = BufView; + type Error = AnyError; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll, Self::Error>>> { + match &mut self.0 { + ResponseBytesInner::Done | ResponseBytesInner::Empty => { + unreachable!() + } + ResponseBytesInner::Bytes(..) 
=> { + if let ResponseBytesInner::Bytes(data) = self.complete(true) { + std::task::Poll::Ready(Some(Ok(Frame::data(data)))) + } else { + unreachable!() + } + } + ResponseBytesInner::Resource(auto_close, stm, ref mut future) => { + match future.poll_unpin(cx) { + std::task::Poll::Pending => std::task::Poll::Pending, + std::task::Poll::Ready(Err(err)) => { + std::task::Poll::Ready(Some(Err(err))) + } + std::task::Poll::Ready(Ok(buf)) => { + if buf.is_empty() { + if *auto_close { + stm.clone().close(); + } + self.complete(true); + return std::task::Poll::Ready(None); + } + // Re-arm the future + *future = stm.clone().read(64 * 1024); + std::task::Poll::Ready(Some(Ok(Frame::data(buf)))) + } + } + } + ResponseBytesInner::V8Stream(stm) => match stm.poll_recv(cx) { + std::task::Poll::Pending => std::task::Poll::Pending, + std::task::Poll::Ready(Some(buf)) => { + std::task::Poll::Ready(Some(Ok(Frame::data(buf)))) + } + std::task::Poll::Ready(None) => { + self.complete(true); + std::task::Poll::Ready(None) + } + }, + } + } + + fn is_end_stream(&self) -> bool { + matches!(self.0, ResponseBytesInner::Done | ResponseBytesInner::Empty) + } + + fn size_hint(&self) -> SizeHint { + // The size hint currently only used in the case where it is exact bounds in hyper, but we'll pass it through + // anyways just in case hyper needs it. + self.0.size_hint() + } +} + +impl Drop for ResponseBytes { + fn drop(&mut self) { + // We won't actually poll_frame for Empty responses so this is where we return success + self.complete(matches!(self.0, ResponseBytesInner::Empty)); + } +} + +/// A response body object that can be passed to V8. This body will feed byte buffers to a channel which +/// feed's hyper's HTTP response. 
+pub struct V8StreamHttpResponseBody( + AsyncRefCell>>, + CancelHandle, +); + +impl V8StreamHttpResponseBody { + pub fn new(sender: tokio::sync::mpsc::Sender) -> Self { + Self(AsyncRefCell::new(Some(sender)), CancelHandle::default()) + } +} + +impl Resource for V8StreamHttpResponseBody { + fn name(&self) -> Cow { + "responseBody".into() + } + + fn write( + self: Rc, + buf: BufView, + ) -> AsyncResult { + let cancel_handle = RcRef::map(&self, |this| &this.1); + Box::pin( + async move { + let nwritten = buf.len(); + + let res = RcRef::map(self, |this| &this.0).borrow().await; + if let Some(tx) = res.as_ref() { + tx.send(buf) + .await + .map_err(|_| bad_resource("failed to write"))?; + Ok(WriteOutcome::Full { nwritten }) + } else { + Err(bad_resource("failed to write")) + } + } + .try_or_cancel(cancel_handle), + ) + } + + fn close(self: Rc) { + self.1.cancel(); + } +} diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index a7a1acff6f..6bab80cc79 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -17,6 +17,7 @@ path = "lib.rs" deno_core.workspace = true deno_tls.workspace = true log.workspace = true +pin-project.workspace = true serde.workspace = true socket2.workspace = true tokio.workspace = true diff --git a/ext/net/lib.rs b/ext/net/lib.rs index f812bf60bc..ff67186b0c 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -5,6 +5,7 @@ pub mod ops; pub mod ops_tls; #[cfg(unix)] pub mod ops_unix; +pub mod raw; pub mod resolve_addr; use deno_core::error::AnyError; diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index c0cfb8674f..8a77570668 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -61,6 +61,7 @@ use std::fs::File; use std::io; use std::io::BufReader; use std::io::ErrorKind; +use std::net::SocketAddr; use std::path::Path; use std::pin::Pin; use std::rc::Rc; @@ -115,6 +116,13 @@ impl TlsStream { Self::new(tcp, Connection::Client(tls)) } + pub fn new_client_side_from( + tcp: TcpStream, + connection: ClientConnection, + ) -> Self { + 
Self::new(tcp, Connection::Client(connection)) + } + pub fn new_server_side( tcp: TcpStream, tls_config: Arc, @@ -123,6 +131,13 @@ impl TlsStream { Self::new(tcp, Connection::Server(tls)) } + pub fn new_server_side_from( + tcp: TcpStream, + connection: ServerConnection, + ) -> Self { + Self::new(tcp, Connection::Server(connection)) + } + pub fn into_split(self) -> (ReadHalf, WriteHalf) { let shared = Shared::new(self); let rd = ReadHalf { @@ -132,6 +147,16 @@ impl TlsStream { (rd, wr) } + /// Convenience method to match [`TcpStream`]. + pub fn peer_addr(&self) -> Result { + self.0.as_ref().unwrap().tcp.peer_addr() + } + + /// Convenience method to match [`TcpStream`]. + pub fn local_addr(&self) -> Result { + self.0.as_ref().unwrap().tcp.local_addr() + } + /// Tokio-rustls compatibility: returns a reference to the underlying TCP /// stream, and a reference to the Rustls `Connection` object. pub fn get_ref(&self) -> (&TcpStream, &Connection) { @@ -954,8 +979,8 @@ fn load_private_keys_from_file( } pub struct TlsListenerResource { - tcp_listener: AsyncRefCell, - tls_config: Arc, + pub(crate) tcp_listener: AsyncRefCell, + pub(crate) tls_config: Arc, cancel_handle: CancelHandle, } diff --git a/ext/net/ops_unix.rs b/ext/net/ops_unix.rs index 1161d27592..bed923f8b4 100644 --- a/ext/net/ops_unix.rs +++ b/ext/net/ops_unix.rs @@ -32,8 +32,8 @@ pub fn into_string(s: std::ffi::OsString) -> Result { }) } -struct UnixListenerResource { - listener: AsyncRefCell, +pub(crate) struct UnixListenerResource { + pub listener: AsyncRefCell, cancel: CancelHandle, } diff --git a/ext/net/raw.rs b/ext/net/raw.rs new file mode 100644 index 0000000000..74cc10d630 --- /dev/null +++ b/ext/net/raw.rs @@ -0,0 +1,304 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::io::TcpStreamResource; +#[cfg(unix)] +use crate::io::UnixStreamResource; +use crate::ops::TcpListenerResource; +use crate::ops_tls::TlsListenerResource; +use crate::ops_tls::TlsStream; +use crate::ops_tls::TlsStreamResource; +#[cfg(unix)] +use crate::ops_unix::UnixListenerResource; +use deno_core::error::bad_resource; +use deno_core::error::bad_resource_id; +use deno_core::error::AnyError; +use deno_core::ResourceId; +use deno_core::ResourceTable; +use deno_tls::rustls::ServerConfig; +use pin_project::pin_project; +use std::rc::Rc; +use std::sync::Arc; +use tokio::net::TcpStream; +#[cfg(unix)] +use tokio::net::UnixStream; + +/// A raw stream of one of the types handled by this extension. +#[pin_project(project = NetworkStreamProject)] +pub enum NetworkStream { + Tcp(#[pin] TcpStream), + Tls(#[pin] TlsStream), + #[cfg(unix)] + Unix(#[pin] UnixStream), +} + +/// A raw stream of one of the types handled by this extension. +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum NetworkStreamType { + Tcp, + Tls, + #[cfg(unix)] + Unix, +} + +impl NetworkStream { + pub fn local_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + Self::Tls(tls) => Ok(NetworkStreamAddress::Ip(tls.local_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.local_addr()?)), + } + } + + pub fn peer_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.peer_addr()?)), + Self::Tls(tls) => Ok(NetworkStreamAddress::Ip(tls.peer_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.peer_addr()?)), + } + } + + pub fn stream(&self) -> NetworkStreamType { + match self { + Self::Tcp(_) => NetworkStreamType::Tcp, + Self::Tls(_) => NetworkStreamType::Tls, + #[cfg(unix)] + Self::Unix(_) => NetworkStreamType::Unix, + } + } +} + +impl tokio::io::AsyncRead for NetworkStream { + fn poll_read( + self: std::pin::Pin<&mut Self>, + cx: &mut 
std::task::Context<'_>, + buf: &mut tokio::io::ReadBuf<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_read(cx, buf), + NetworkStreamProject::Tls(s) => s.poll_read(cx, buf), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_read(cx, buf), + } + } +} + +impl tokio::io::AsyncWrite for NetworkStream { + fn poll_write( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_write(cx, buf), + NetworkStreamProject::Tls(s) => s.poll_write(cx, buf), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_write(cx, buf), + } + } + + fn poll_flush( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_flush(cx), + NetworkStreamProject::Tls(s) => s.poll_flush(cx), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_flush(cx), + } + } + + fn poll_shutdown( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_shutdown(cx), + NetworkStreamProject::Tls(s) => s.poll_shutdown(cx), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_shutdown(cx), + } + } + + fn is_write_vectored(&self) -> bool { + match self { + Self::Tcp(s) => s.is_write_vectored(), + Self::Tls(s) => s.is_write_vectored(), + #[cfg(unix)] + Self::Unix(s) => s.is_write_vectored(), + } + } + + fn poll_write_vectored( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + match self.project() { + NetworkStreamProject::Tcp(s) => s.poll_write_vectored(cx, bufs), + NetworkStreamProject::Tls(s) => s.poll_write_vectored(cx, bufs), + #[cfg(unix)] + NetworkStreamProject::Unix(s) => s.poll_write_vectored(cx, bufs), + } + } +} + +/// A raw stream listener of 
one of the types handled by this extension. +pub enum NetworkStreamListener { + Tcp(tokio::net::TcpListener), + Tls(tokio::net::TcpListener, Arc), + #[cfg(unix)] + Unix(tokio::net::UnixListener), +} + +pub enum NetworkStreamAddress { + Ip(std::net::SocketAddr), + #[cfg(unix)] + Unix(tokio::net::unix::SocketAddr), +} + +impl NetworkStreamListener { + /// Accepts a connection on this listener. + pub async fn accept(&self) -> Result { + Ok(match self { + Self::Tcp(tcp) => { + let (stream, _addr) = tcp.accept().await?; + NetworkStream::Tcp(stream) + } + Self::Tls(tcp, config) => { + let (stream, _addr) = tcp.accept().await?; + NetworkStream::Tls(TlsStream::new_server_side(stream, config.clone())) + } + #[cfg(unix)] + Self::Unix(unix) => { + let (stream, _addr) = unix.accept().await?; + NetworkStream::Unix(stream) + } + }) + } + + pub fn listen_address(&self) -> Result { + match self { + Self::Tcp(tcp) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + Self::Tls(tcp, _) => Ok(NetworkStreamAddress::Ip(tcp.local_addr()?)), + #[cfg(unix)] + Self::Unix(unix) => Ok(NetworkStreamAddress::Unix(unix.local_addr()?)), + } + } + + pub fn stream(&self) -> NetworkStreamType { + match self { + Self::Tcp(..) => NetworkStreamType::Tcp, + Self::Tls(..) => NetworkStreamType::Tls, + #[cfg(unix)] + Self::Unix(..) => NetworkStreamType::Unix, + } + } +} + +/// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). +/// This method will extract a stream from the resource table and return it, unwrapped. +pub fn take_network_stream_resource( + resource_table: &mut ResourceTable, + stream_rid: ResourceId, +) -> Result { + // The stream we're attempting to unwrap may be in use somewhere else. If that's the case, we cannot proceed + // with the process of unwrapping this connection, so we just return a bad resource error. 
+ // See also: https://github.com/denoland/deno/pull/16242 + + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This TCP connection might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TCP stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let tcp_stream = read_half.reunite(write_half)?; + return Ok(NetworkStream::Tcp(tcp_stream)); + } + + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This TLS connection might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TLS stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let tls_stream = read_half.reunite(write_half); + return Ok(NetworkStream::Tls(tls_stream)); + } + + #[cfg(unix)] + if let Ok(resource_rc) = resource_table.take::(stream_rid) + { + // This UNIX socket might be used somewhere else. + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("UNIX stream is currently in use"))?; + let (read_half, write_half) = resource.into_inner(); + let unix_stream = read_half.reunite(write_half)?; + return Ok(NetworkStream::Unix(unix_stream)); + } + + Err(bad_resource_id()) +} + +/// Inserts a raw stream (back?) into the resource table and returns a resource ID. This can then be used to create raw connection +/// objects on the JS side. 
+pub fn put_network_stream_resource( + resource_table: &mut ResourceTable, + stream: NetworkStream, +) -> Result { + let res = match stream { + NetworkStream::Tcp(conn) => { + let (r, w) = conn.into_split(); + resource_table.add(TcpStreamResource::new((r, w))) + } + NetworkStream::Tls(conn) => { + let (r, w) = conn.into_split(); + resource_table.add(TlsStreamResource::new((r, w))) + } + #[cfg(unix)] + NetworkStream::Unix(conn) => { + let (r, w) = conn.into_split(); + resource_table.add(UnixStreamResource::new((r, w))) + } + }; + + Ok(res) +} + +/// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). +/// This method will extract a stream from the resource table and return it, unwrapped. +pub fn take_network_stream_listener_resource( + resource_table: &mut ResourceTable, + listener_rid: ResourceId, +) -> Result { + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TCP socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Tcp(resource.listener.into_inner())); + } + + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("TLS socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Tls( + resource.tcp_listener.into_inner(), + resource.tls_config, + )); + } + + #[cfg(unix)] + if let Ok(resource_rc) = + resource_table.take::(listener_rid) + { + let resource = Rc::try_unwrap(resource_rc) + .map_err(|_| bad_resource("UNIX socket listener is currently in use"))?; + return Ok(NetworkStreamListener::Unix(resource.listener.into_inner())); + } + + Err(bad_resource_id()) +} diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 53e184e1e2..006c73a5f2 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -14,11 +14,13 @@ description = 
"Implementation of WebSocket API for Deno" path = "lib.rs" [dependencies] +bytes.workspace = true deno_core.workspace = true +deno_net.workspace = true deno_tls.workspace = true fastwebsockets = { workspace = true, features = ["upgrade"] } http.workspace = true -hyper.workspace = true +hyper = { workspace = true, features = ["backports"] } serde.workspace = true tokio.workspace = true tokio-rustls.workspace = true diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 798856bc14..71aa66ff38 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -1,11 +1,10 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - +use crate::stream::WebSocketStream; +use bytes::Bytes; use deno_core::error::invalid_hostname; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; -use deno_core::StringOrBuffer; - use deno_core::url; use deno_core::AsyncRefCell; use deno_core::ByteString; @@ -15,7 +14,10 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_core::StringOrBuffer; use deno_core::ZeroCopyBuf; +use deno_net::raw::take_network_stream_resource; +use deno_net::raw::NetworkStream; use deno_tls::create_client_config; use http::header::CONNECTION; use http::header::UPGRADE; @@ -24,9 +26,7 @@ use http::HeaderValue; use http::Method; use http::Request; use http::Uri; -use hyper::upgrade::Upgraded; use hyper::Body; -use hyper::Response; use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; @@ -52,6 +52,7 @@ use fastwebsockets::Role; use fastwebsockets::WebSocket; pub use tokio_tungstenite; // Re-export tokio_tungstenite +mod stream; #[derive(Clone)] pub struct WsRootStore(pub Option); @@ -243,17 +244,21 @@ where let client = fastwebsockets::handshake::client(&LocalExecutor, request, socket); - let (stream, response): (WebSocket, Response) = - if let Some(cancel_resource) = cancel_resource { - 
client.or_cancel(cancel_resource.0.to_owned()).await? - } else { - client.await - } - .map_err(|err| { - DomExceptionNetworkError::new(&format!( - "failed to connect to WebSocket: {err}" - )) - })?; + let (upgraded, response) = if let Some(cancel_resource) = cancel_resource { + client.or_cancel(cancel_resource.0.to_owned()).await? + } else { + client.await + } + .map_err(|err| { + DomExceptionNetworkError::new(&format!( + "failed to connect to WebSocket: {err}" + )) + })?; + + let inner = MaybeTlsStream::Plain(upgraded.into_inner()); + let stream = + WebSocketStream::new(stream::WsStreamKind::Tungstenite(inner), None); + let stream = WebSocket::after_handshake(stream, Role::Client); if let Some(cancel_rid) = cancel_handle { state.borrow_mut().resource_table.close(cancel_rid).ok(); @@ -294,7 +299,7 @@ pub enum MessageKind { } pub struct ServerWebSocket { - ws: AsyncRefCell>, + ws: AsyncRefCell>, closed: Rc>, } @@ -320,11 +325,19 @@ impl Resource for ServerWebSocket { "serverWebSocket".into() } } -pub async fn ws_create_server_stream( - state: &Rc>, - transport: Upgraded, + +pub fn ws_create_server_stream( + state: &mut OpState, + transport: NetworkStream, + read_buf: Bytes, ) -> Result { - let mut ws = WebSocket::after_handshake(transport, Role::Server); + let mut ws = WebSocket::after_handshake( + WebSocketStream::new( + stream::WsStreamKind::Network(transport), + Some(read_buf), + ), + Role::Server, + ); ws.set_writev(true); ws.set_auto_close(true); ws.set_auto_pong(true); @@ -334,11 +347,26 @@ pub async fn ws_create_server_stream( closed: Rc::new(Cell::new(false)), }; - let resource_table = &mut state.borrow_mut().resource_table; - let rid = resource_table.add(ws_resource); + let rid = state.resource_table.add(ws_resource); Ok(rid) } +#[op] +pub fn op_ws_server_create( + state: &mut OpState, + conn: ResourceId, + extra_bytes: &[u8], +) -> Result { + let network_stream = + take_network_stream_resource(&mut state.resource_table, conn)?; + // Copying the extra 
bytes, but unlikely this will account for much + ws_create_server_stream( + state, + network_stream, + Bytes::from(extra_bytes.to_vec()), + ) +} + #[op] pub async fn op_ws_send_binary( state: Rc>, @@ -490,6 +518,7 @@ deno_core::extension!(deno_websocket, op_ws_next_event, op_ws_send_binary, op_ws_send_text, + op_ws_server_create, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { diff --git a/ext/websocket/stream.rs b/ext/websocket/stream.rs new file mode 100644 index 0000000000..69c06b7eb7 --- /dev/null +++ b/ext/websocket/stream.rs @@ -0,0 +1,115 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use bytes::Buf; +use bytes::Bytes; +use deno_net::raw::NetworkStream; +use hyper::upgrade::Upgraded; +use std::pin::Pin; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; +use tokio::io::ReadBuf; +use tokio_tungstenite::MaybeTlsStream; + +// TODO(bartlomieju): remove this +pub(crate) enum WsStreamKind { + Tungstenite(MaybeTlsStream), + Network(NetworkStream), +} + +pub(crate) struct WebSocketStream { + stream: WsStreamKind, + pre: Option, +} + +impl WebSocketStream { + pub fn new(stream: WsStreamKind, buffer: Option) -> Self { + Self { + stream, + pre: buffer, + } + } +} + +impl AsyncRead for WebSocketStream { + // From hyper's Rewind (https://github.com/hyperium/hyper), MIT License, Copyright (c) Sean McArthur + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if let Some(mut prefix) = self.pre.take() { + // If there are no remaining bytes, let the bytes get dropped. + if !prefix.is_empty() { + let copy_len = std::cmp::min(prefix.len(), buf.remaining()); + // TODO: There should be a way to do following two lines cleaner... 
+ buf.put_slice(&prefix[..copy_len]); + prefix.advance(copy_len); + // Put back what's left + if !prefix.is_empty() { + self.pre = Some(prefix); + } + + return Poll::Ready(Ok(())); + } + } + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_read(cx, buf), + WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_read(cx, buf), + } + } +} + +impl AsyncWrite for WebSocketStream { + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_write(cx, buf), + WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_write(cx, buf), + } + } + + fn poll_flush( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_flush(cx), + WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_flush(cx), + } + } + + fn poll_shutdown( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => Pin::new(stream).poll_shutdown(cx), + WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_shutdown(cx), + } + } + + fn is_write_vectored(&self) -> bool { + match &self.stream { + WsStreamKind::Network(stream) => stream.is_write_vectored(), + WsStreamKind::Tungstenite(stream) => stream.is_write_vectored(), + } + } + + fn poll_write_vectored( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + match &mut self.stream { + WsStreamKind::Network(stream) => { + Pin::new(stream).poll_write_vectored(cx, bufs) + } + WsStreamKind::Tungstenite(stream) => { + Pin::new(stream).poll_write_vectored(cx, bufs) + } + } + } +} From 13f7f8c415f0f3ce7cb8801beb317066ef59a0e3 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sun, 23 Apr 2023 09:59:46 
-0600 Subject: [PATCH 017/320] fix(ext/http): ensure that multiple upgrades and multiple simultaneous requests cannot cause a panic (#18810) Fix a bug where we weren't saving `slabId` in #18619, plus add some robustness checks around multiple upgrades (w/test). --- cli/tests/unit/serve_test.ts | 86 ++++++++++++++++++++++++++++++++++++ ext/http/00_serve.js | 11 ++++- ext/http/http_next.rs | 19 ++++++-- 3 files changed, 111 insertions(+), 5 deletions(-) diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 8344f1be5e..9268c7aab8 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -758,6 +758,92 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { await server; }); +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketUpgradeTwice() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + assertThrows( + () => { + Deno.upgradeWebSocket(request); + }, + Deno.errors.Http, + "already upgraded", + ); + socket.onerror = (e) => { + console.error(e); + fail(); + }; + socket.onmessage = (m) => { + socket.send(m.data); + socket.close(1001); + }; + return response; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new WebSocket("ws://localhost:4501"); + ws.onmessage = (m) => assertEquals(m.data, "foo"); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + ws.onopen = () => ws.send("foo"); + + await def; + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketCloseFast() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ 
+ handler: async (request) => { + const { + response, + socket, + } = Deno.upgradeWebSocket(request); + socket.onopen = () => socket.close(); + return response; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + const def = deferred(); + const ws = new WebSocket("ws://localhost:4501"); + ws.onerror = (e) => { + console.error(e); + fail(); + }; + ws.onclose = () => def.resolve(); + + await def; + ac.abort(); + await server; + }, +); + Deno.test( { permissions: { net: true } }, async function httpServerWebSocketCanAccessRequest() { diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 91bd360944..3022bc5fac 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -108,6 +108,13 @@ class InnerRequest { } _wantsUpgrade(upgradeType, ...originalArgs) { + if (this.#upgraded) { + throw new Deno.errors.Http("already upgraded"); + } + if (this.#slabId === undefined) { + throw new Deno.errors.Http("already closed"); + } + // upgradeHttp is async // TODO(mmastrac) if (upgradeType == "upgradeHttp") { @@ -125,6 +132,8 @@ class InnerRequest { const response = originalArgs[0]; const ws = originalArgs[1]; + const slabId = this.#slabId; + this.url(); this.headerList; this.close(); @@ -140,7 +149,7 @@ class InnerRequest { // Returns the connection and extra bytes, which we can pass directly to op_ws_server_create const upgrade = await core.opAsync2( "op_upgrade", - this.#slabId, + slabId, response.headerList, ); const wsRid = core.ops.op_ws_server_create(upgrade[0], upgrade[1]); diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 25088e1ab0..47888f0a49 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -112,7 +112,14 @@ macro_rules! with { SLAB.with(|slab| { let mut borrow = slab.borrow_mut(); #[allow(unused_mut)] // TODO(mmastrac): compiler issue? 
- let mut $http = borrow.get_mut(key).unwrap(); + let mut $http = match borrow.get_mut(key) { + Some(http) => http, + None => panic!( + "Attemped to access invalid request {} ({} in total available)", + key, + borrow.len() + ), + }; #[cfg(__zombie_http_tracking)] if !$http.alive { panic!("Attempted to access a dead HTTP object") @@ -199,7 +206,11 @@ pub async fn op_upgrade( // Stage 1: set the respnse to 101 Switching Protocols and send it let upgrade = with_http_mut(index, |http| { // Manually perform the upgrade. We're peeking into hyper's underlying machinery here a bit - let upgrade = http.request_parts.extensions.remove::().unwrap(); + let upgrade = http + .request_parts + .extensions + .remove::() + .ok_or_else(|| AnyError::msg("upgrade unavailable"))?; let response = http.response.as_mut().unwrap(); *response.status_mut() = StatusCode::SWITCHING_PROTOCOLS; @@ -210,8 +221,8 @@ pub async fn op_upgrade( ); } http.promise.complete(true); - upgrade - }); + Ok::<_, AnyError>(upgrade) + })?; // Stage 2: wait for the request to finish upgrading let upgraded = upgrade.await?; From c95477c49f16a753a9d25b46014fabfd3c7eb9e6 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sun, 23 Apr 2023 20:07:30 +0200 Subject: [PATCH 018/320] tests: update tests relying on deno.land (#18811) --- cli/file_fetcher.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 75c2c608fc..38b96c72de 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -1982,7 +1982,7 @@ mod tests { async fn test_fetch_with_default_certificate_store() { let _http_server_guard = test_util::http_server(); // Relies on external http server with a valid mozilla root CA cert. 
- let url = Url::parse("https://deno.land").unwrap(); + let url = Url::parse("https://deno.land/x").unwrap(); let client = HttpClient::from_client( create_http_client( version::get_user_agent(), From fafb2584efec33152fbe353d94151fa36004586a Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sun, 23 Apr 2023 14:07:37 -0600 Subject: [PATCH 019/320] refactor(ext/websocket): Remove dep on tungstenite by reworking code (#18812) --- ext/websocket/lib.rs | 57 ++++++++++++++++++++++++----------------- ext/websocket/stream.rs | 15 +++++------ 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 71aa66ff38..943b5d47c7 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -38,11 +38,12 @@ use std::future::Future; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; use tokio::net::TcpStream; use tokio_rustls::rustls::RootCertStore; use tokio_rustls::rustls::ServerName; use tokio_rustls::TlsConnector; -use tokio_tungstenite::MaybeTlsStream; use fastwebsockets::CloseCode; use fastwebsockets::FragmentCollector; @@ -129,6 +130,33 @@ pub struct CreateResponse { extensions: String, } +async fn handshake( + cancel_resource: Option>, + request: Request, + socket: S, +) -> Result<(WebSocket, http::Response), AnyError> { + let client = + fastwebsockets::handshake::client(&LocalExecutor, request, socket); + + let (upgraded, response) = if let Some(cancel_resource) = cancel_resource { + client.or_cancel(cancel_resource).await? 
+ } else { + client.await + } + .map_err(|err| { + DomExceptionNetworkError::new(&format!( + "failed to connect to WebSocket: {err}" + )) + })?; + + let upgraded = upgraded.into_inner(); + let stream = + WebSocketStream::new(stream::WsStreamKind::Upgraded(upgraded), None); + let stream = WebSocket::after_handshake(stream, Role::Client); + + Ok((stream, response)) +} + #[op] pub async fn op_ws_create( state: Rc>, @@ -155,7 +183,7 @@ where .borrow_mut() .resource_table .get::(cancel_rid)?; - Some(r) + Some(r.0.clone()) } else { None }; @@ -223,8 +251,8 @@ where let addr = format!("{domain}:{port}"); let tcp_socket = TcpStream::connect(addr).await?; - let socket: MaybeTlsStream = match uri.scheme_str() { - Some("ws") => MaybeTlsStream::Plain(tcp_socket), + let (stream, response) = match uri.scheme_str() { + Some("ws") => handshake(cancel_resource, request, tcp_socket).await?, Some("wss") => { let tls_config = create_client_config( root_cert_store, @@ -236,30 +264,11 @@ where let dnsname = ServerName::try_from(domain.as_str()) .map_err(|_| invalid_hostname(domain))?; let tls_socket = tls_connector.connect(dnsname, tcp_socket).await?; - MaybeTlsStream::Rustls(tls_socket) + handshake(cancel_resource, request, tls_socket).await? } _ => unreachable!(), }; - let client = - fastwebsockets::handshake::client(&LocalExecutor, request, socket); - - let (upgraded, response) = if let Some(cancel_resource) = cancel_resource { - client.or_cancel(cancel_resource.0.to_owned()).await? 
- } else { - client.await - } - .map_err(|err| { - DomExceptionNetworkError::new(&format!( - "failed to connect to WebSocket: {err}" - )) - })?; - - let inner = MaybeTlsStream::Plain(upgraded.into_inner()); - let stream = - WebSocketStream::new(stream::WsStreamKind::Tungstenite(inner), None); - let stream = WebSocket::after_handshake(stream, Role::Client); - if let Some(cancel_rid) = cancel_handle { state.borrow_mut().resource_table.close(cancel_rid).ok(); } diff --git a/ext/websocket/stream.rs b/ext/websocket/stream.rs index 69c06b7eb7..6f93406f62 100644 --- a/ext/websocket/stream.rs +++ b/ext/websocket/stream.rs @@ -8,11 +8,10 @@ use std::task::Poll; use tokio::io::AsyncRead; use tokio::io::AsyncWrite; use tokio::io::ReadBuf; -use tokio_tungstenite::MaybeTlsStream; // TODO(bartlomieju): remove this pub(crate) enum WsStreamKind { - Tungstenite(MaybeTlsStream), + Upgraded(Upgraded), Network(NetworkStream), } @@ -54,7 +53,7 @@ impl AsyncRead for WebSocketStream { } match &mut self.stream { WsStreamKind::Network(stream) => Pin::new(stream).poll_read(cx, buf), - WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_read(cx, buf), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_read(cx, buf), } } } @@ -67,7 +66,7 @@ impl AsyncWrite for WebSocketStream { ) -> std::task::Poll> { match &mut self.stream { WsStreamKind::Network(stream) => Pin::new(stream).poll_write(cx, buf), - WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_write(cx, buf), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_write(cx, buf), } } @@ -77,7 +76,7 @@ impl AsyncWrite for WebSocketStream { ) -> std::task::Poll> { match &mut self.stream { WsStreamKind::Network(stream) => Pin::new(stream).poll_flush(cx), - WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_flush(cx), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_flush(cx), } } @@ -87,14 +86,14 @@ impl AsyncWrite for WebSocketStream { ) -> std::task::Poll> { match &mut self.stream { 
WsStreamKind::Network(stream) => Pin::new(stream).poll_shutdown(cx), - WsStreamKind::Tungstenite(stream) => Pin::new(stream).poll_shutdown(cx), + WsStreamKind::Upgraded(stream) => Pin::new(stream).poll_shutdown(cx), } } fn is_write_vectored(&self) -> bool { match &self.stream { WsStreamKind::Network(stream) => stream.is_write_vectored(), - WsStreamKind::Tungstenite(stream) => stream.is_write_vectored(), + WsStreamKind::Upgraded(stream) => stream.is_write_vectored(), } } @@ -107,7 +106,7 @@ impl AsyncWrite for WebSocketStream { WsStreamKind::Network(stream) => { Pin::new(stream).poll_write_vectored(cx, bufs) } - WsStreamKind::Tungstenite(stream) => { + WsStreamKind::Upgraded(stream) => { Pin::new(stream).poll_write_vectored(cx, bufs) } } From 5a524a9a5a7fac0d16b2cbe2df1142dc419df7fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 23 Apr 2023 22:55:45 +0200 Subject: [PATCH 020/320] refactor: rewrite client side tests to 'fastwebsockets' crate (#18800) Follow up to https://github.com/denoland/deno/pull/18781. 
--- cli/tests/integration/inspector_tests.rs | 71 +++++------ cli/tests/integration/run_tests.rs | 147 +++++++++++++++++------ 2 files changed, 144 insertions(+), 74 deletions(-) diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index 35ff014030..cf66c4adc1 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -6,15 +6,18 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url; use deno_runtime::deno_fetch::reqwest; -use deno_runtime::deno_websocket::tokio_tungstenite; use fastwebsockets::FragmentCollector; use fastwebsockets::Frame; +use fastwebsockets::WebSocket; use hyper::upgrade::Upgraded; +use hyper::Body; use hyper::Request; +use hyper::Response; use std::io::BufRead; use test_util as util; use test_util::TempDir; use tokio::net::TcpStream; +use url::Url; use util::http_server; use util::DenoChild; @@ -30,6 +33,37 @@ where } } +async fn connect_to_ws(uri: Url) -> (WebSocket, Response) { + let domain = &uri.host().unwrap().to_string(); + let port = &uri.port().unwrap_or(match uri.scheme() { + "wss" | "https" => 443, + _ => 80, + }); + let addr = format!("{domain}:{port}"); + + let stream = TcpStream::connect(addr).await.unwrap(); + + let host = uri.host_str().unwrap(); + + let req = Request::builder() + .method("GET") + .uri(uri.path()) + .header("Host", host) + .header(hyper::header::UPGRADE, "websocket") + .header(hyper::header::CONNECTION, "Upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13") + .body(hyper::Body::empty()) + .unwrap(); + + fastwebsockets::handshake::client(&SpawnExecutor, req, stream) + .await + .unwrap() +} + struct InspectorTester { socket: FragmentCollector, notification_filter: Box bool + 'static>, @@ -57,35 +91,7 @@ impl InspectorTester { let uri = extract_ws_url_from_stderr(&mut stderr_lines); - let domain = 
&uri.host().unwrap().to_string(); - let port = &uri.port().unwrap_or(match uri.scheme() { - "wss" | "https" => 443, - _ => 80, - }); - let addr = format!("{domain}:{port}"); - - let stream = TcpStream::connect(addr).await.unwrap(); - - let host = uri.host_str().unwrap(); - - let req = Request::builder() - .method("GET") - .uri(uri.path()) - .header("Host", host) - .header(hyper::header::UPGRADE, "websocket") - .header(hyper::header::CONNECTION, "Upgrade") - .header( - "Sec-WebSocket-Key", - fastwebsockets::handshake::generate_key(), - ) - .header("Sec-WebSocket-Version", "13") - .body(hyper::Body::empty()) - .unwrap(); - - let (socket, response) = - fastwebsockets::handshake::client(&SpawnExecutor, req, stream) - .await - .unwrap(); + let (socket, response) = connect_to_ws(uri).await; assert_eq!(response.status(), 101); // Switching protocols. @@ -289,10 +295,7 @@ async fn inspector_connect() { std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); let ws_url = extract_ws_url_from_stderr(&mut stderr_lines); - // We use tokio_tungstenite as a websocket client because warp (which is - // a dependency of Deno) uses it. 
- let (_socket, response) = - tokio_tungstenite::connect_async(ws_url).await.unwrap(); + let (_socket, response) = connect_to_ws(ws_url).await; assert_eq!("101 Switching Protocols", response.status().to_string()); child.kill().unwrap(); child.wait().unwrap(); diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index b5bb04fdd9..aba6283d12 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -4088,14 +4088,46 @@ fn websocketstream() { assert!(status.success()); } -#[test] -fn websocketstream_ping() { - use deno_runtime::deno_websocket::tokio_tungstenite::tungstenite; +#[tokio::test(flavor = "multi_thread")] +async fn websocketstream_ping() { let _g = util::http_server(); let script = util::testdata_path().join("run/websocketstream_ping_test.ts"); let root_ca = util::testdata_path().join("tls/RootCA.pem"); - let mut child = util::deno_cmd() + + let srv_fn = hyper::service::service_fn(|mut req| async move { + let (response, upgrade_fut) = + fastwebsockets::upgrade::upgrade(&mut req).unwrap(); + tokio::spawn(async move { + let mut ws = upgrade_fut.await.unwrap(); + + ws.write_frame(fastwebsockets::Frame::text("A".as_bytes().to_vec())) + .await + .unwrap(); + ws.write_frame(fastwebsockets::Frame::new( + true, + fastwebsockets::OpCode::Ping, + None, + vec![], + )) + .await + .unwrap(); + ws.write_frame(fastwebsockets::Frame::text("B".as_bytes().to_vec())) + .await + .unwrap(); + let message = ws.read_frame().await.unwrap(); + assert_eq!(message.opcode, fastwebsockets::OpCode::Pong); + ws.write_frame(fastwebsockets::Frame::text("C".as_bytes().to_vec())) + .await + .unwrap(); + ws.write_frame(fastwebsockets::Frame::close_raw(vec![])) + .await + .unwrap(); + }); + Ok::<_, std::convert::Infallible>(response) + }); + + let child = util::deno_cmd() .arg("test") .arg("--unstable") .arg("--allow-net") @@ -4105,31 +4137,38 @@ fn websocketstream_ping() { .stdout(std::process::Stdio::piped()) .spawn() 
.unwrap(); + let server = tokio::net::TcpListener::bind("127.0.0.1:4513") + .await + .unwrap(); + tokio::spawn(async move { + let (stream, _) = server.accept().await.unwrap(); + let conn_fut = hyper::server::conn::Http::new() + .serve_connection(stream, srv_fn) + .with_upgrades(); - let server = std::net::TcpListener::bind("127.0.0.1:4513").unwrap(); - let (stream, _) = server.accept().unwrap(); - let mut socket = tungstenite::accept(stream).unwrap(); - socket - .write_message(tungstenite::Message::Text(String::from("A"))) - .unwrap(); - socket - .write_message(tungstenite::Message::Ping(vec![])) - .unwrap(); - socket - .write_message(tungstenite::Message::Text(String::from("B"))) - .unwrap(); - let message = socket.read_message().unwrap(); - assert_eq!(message, tungstenite::Message::Pong(vec![])); - socket - .write_message(tungstenite::Message::Text(String::from("C"))) - .unwrap(); - socket.close(None).unwrap(); + if let Err(e) = conn_fut.await { + eprintln!("websocket server error: {e:?}"); + } + }); - assert!(child.wait().unwrap().success()); + let r = child.wait_with_output().unwrap(); + assert!(r.status.success()); } -#[test] -fn websocket_server_multi_field_connection_header() { +struct SpawnExecutor; + +impl hyper::rt::Executor for SpawnExecutor +where + Fut: std::future::Future + Send + 'static, + Fut::Output: Send + 'static, +{ + fn execute(&self, fut: Fut) { + tokio::task::spawn(fut); + } +} + +#[tokio::test] +async fn websocket_server_multi_field_connection_header() { let script = util::testdata_path() .join("run/websocket_server_multi_field_connection_header_test.ts"); let root_ca = util::testdata_path().join("tls/RootCA.pem"); @@ -4151,25 +4190,41 @@ fn websocket_server_multi_field_connection_header() { let msg = std::str::from_utf8(&buffer).unwrap(); assert_eq!(msg, "READY"); - let req = http::request::Builder::new() - .header(http::header::CONNECTION, "keep-alive, Upgrade") - .uri("ws://localhost:4319") - .body(()) + let stream = 
tokio::net::TcpStream::connect("localhost:4319") + .await .unwrap(); + let req = hyper::Request::builder() + .header(hyper::header::UPGRADE, "websocket") + .header(http::header::CONNECTION, "keep-alive, Upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13") + .uri("ws://localhost:4319") + .body(hyper::Body::empty()) + .unwrap(); + let (mut socket, _) = - deno_runtime::deno_websocket::tokio_tungstenite::tungstenite::connect(req) + fastwebsockets::handshake::client(&SpawnExecutor, req, stream) + .await .unwrap(); - let message = socket.read_message().unwrap(); - assert_eq!(message, deno_runtime::deno_websocket::tokio_tungstenite::tungstenite::Message::Close(None)); - socket.close(None).unwrap(); + + let message = socket.read_frame().await.unwrap(); + assert_eq!(message.opcode, fastwebsockets::OpCode::Close); + assert!(message.payload.is_empty()); + socket + .write_frame(fastwebsockets::Frame::close_raw(vec![])) + .await + .unwrap(); assert!(child.wait().unwrap().success()); } // TODO(bartlomieju): this should use `deno run`, not `deno test`; but the // test hangs then. 
https://github.com/denoland/deno/issues/14283 -#[test] +#[tokio::test] #[ignore] -fn websocket_server_idletimeout() { +async fn websocket_server_idletimeout() { let script = util::testdata_path().join("run/websocket_server_idletimeout.ts"); let root_ca = util::testdata_path().join("tls/RootCA.pem"); @@ -4191,12 +4246,24 @@ fn websocket_server_idletimeout() { let msg = std::str::from_utf8(&buffer).unwrap(); assert_eq!(msg, "READY"); - let req = http::request::Builder::new() - .uri("ws://localhost:4509") - .body(()) + let stream = tokio::net::TcpStream::connect("localhost:4509") + .await .unwrap(); - let (_ws, _request) = - deno_runtime::deno_websocket::tokio_tungstenite::tungstenite::connect(req) + let req = hyper::Request::builder() + .header(hyper::header::UPGRADE, "websocket") + .header(http::header::CONNECTION, "keep-alive, Upgrade") + .header( + "Sec-WebSocket-Key", + fastwebsockets::handshake::generate_key(), + ) + .header("Sec-WebSocket-Version", "13") + .uri("ws://localhost:4509") + .body(hyper::Body::empty()) + .unwrap(); + + let (_socket, _) = + fastwebsockets::handshake::client(&SpawnExecutor, req, stream) + .await .unwrap(); assert!(child.wait().unwrap().success()); From ea0694bbfd6368d30cb8eaa3dd87f739eb60f8d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 24 Apr 2023 01:48:25 +0200 Subject: [PATCH 021/320] test: deflake worker tests (#18813) --- cli/tests/testdata/run/worker_drop_handle_race.js.out | 3 +-- cli/tests/testdata/workers/permissions_blob_local.ts.out | 3 +-- .../testdata/workers/permissions_dynamic_remote.ts.out | 3 +-- cli/tests/testdata/workers/permissions_remote_remote.ts.out | 3 +-- cli/tests/testdata/workers/worker_async_error.ts.out | 3 +-- cli/tests/testdata/workers/worker_error.ts.out | 3 +-- .../testdata/workers/worker_message_handler_error.ts.out | 3 +-- cli/tests/testdata/workers/worker_nested_error.ts.out | 6 ++---- 8 files changed, 9 insertions(+), 18 deletions(-) diff --git 
a/cli/tests/testdata/run/worker_drop_handle_race.js.out b/cli/tests/testdata/run/worker_drop_handle_race.js.out index ba66941591..451c3af3d5 100644 --- a/cli/tests/testdata/run/worker_drop_handle_race.js.out +++ b/cli/tests/testdata/run/worker_drop_handle_race.js.out @@ -5,5 +5,4 @@ error: Uncaught (in worker "") Error at Object.action (ext:deno_web/02_timers.js:[WILDCARD]) at handleTimerMacrotask (ext:deno_web/02_timers.js:[WILDCARD]) error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl (ext:runtime/11_workers.js:[WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/permissions_blob_local.ts.out b/cli/tests/testdata/workers/permissions_blob_local.ts.out index 8cfd41523c..0cd581f7b7 100644 --- a/cli/tests/testdata/workers/permissions_blob_local.ts.out +++ b/cli/tests/testdata/workers/permissions_blob_local.ts.out @@ -1,5 +1,4 @@ error: Uncaught (in worker "") Requires read access to "[WILDCARD]local_file.ts", run again with the --allow-read flag at blob:null/[WILDCARD]:1:8 error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out index cbd3f480f7..cd1884c7e6 100644 --- a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out +++ b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out @@ -3,5 +3,4 @@ await import("https://example.com/some/file.ts"); ^ at async http://localhost:4545/workers/dynamic_remote.ts:2:1 [WILDCARD]error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/permissions_remote_remote.ts.out b/cli/tests/testdata/workers/permissions_remote_remote.ts.out index 001370f2fc..bb065740aa 100644 --- a/cli/tests/testdata/workers/permissions_remote_remote.ts.out +++ b/cli/tests/testdata/workers/permissions_remote_remote.ts.out @@ -1,5 +1,4 @@ error: Uncaught (in worker "") Requires net access to "example.com", run again with the --allow-net flag at http://localhost:4545/workers/static_remote.ts:2:8 error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_async_error.ts.out b/cli/tests/testdata/workers/worker_async_error.ts.out index 84863f0166..8d017859c4 100644 --- a/cli/tests/testdata/workers/worker_async_error.ts.out +++ b/cli/tests/testdata/workers/worker_async_error.ts.out @@ -4,5 +4,4 @@ error: Uncaught (in worker "foo") (in promise) Error: bar at [WILDCARD]/async_error.ts:[WILDCARD] at [WILDCARD]/async_error.ts:[WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_error.ts.out b/cli/tests/testdata/workers/worker_error.ts.out index 89f579fb74..78d0c423ed 100644 --- a/cli/tests/testdata/workers/worker_error.ts.out +++ b/cli/tests/testdata/workers/worker_error.ts.out @@ -2,5 +2,4 @@ at foo ([WILDCARD]) at [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_message_handler_error.ts.out b/cli/tests/testdata/workers/worker_message_handler_error.ts.out index 76449f989e..0f97e97036 100644 --- a/cli/tests/testdata/workers/worker_message_handler_error.ts.out +++ b/cli/tests/testdata/workers/worker_message_handler_error.ts.out @@ -4,5 +4,4 @@ error: Uncaught (in worker "foo") Error: bar at onmessage ([WILDCARD]/message_handler_error.ts:[WILDCARD]) at [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] diff --git a/cli/tests/testdata/workers/worker_nested_error.ts.out b/cli/tests/testdata/workers/worker_nested_error.ts.out index dd65036b28..15cb85b48c 100644 --- a/cli/tests/testdata/workers/worker_nested_error.ts.out +++ b/cli/tests/testdata/workers/worker_nested_error.ts.out @@ -4,8 +4,6 @@ at foo ([WILDCARD]/workers/error.ts:[WILDCARD]) at [WILDCARD]/workers/error.ts:[WILDCARD] error: Uncaught (in worker "baz") (in promise) Error: Unhandled error in child worker. - at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] error: Uncaught (in promise) Error: Unhandled error in child worker. 
- at Worker.#pollControl ([WILDCARD]) - at eventLoopTick (ext:core/01_core.js:[WILDCARD]) + at Worker.#pollControl [WILDCARD] From 28e2c7204fe02304a8fc3339d7758eec0f64f723 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 24 Apr 2023 05:56:55 +0200 Subject: [PATCH 022/320] chore: remove tokio-tungstenite dependency (#18814) --- Cargo.lock | 67 ++-------------------------------------- Cargo.toml | 1 - ext/websocket/Cargo.toml | 1 - ext/websocket/lib.rs | 1 - test_util/Cargo.toml | 1 - 5 files changed, 3 insertions(+), 68 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ac188de531..666741cf5d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -328,15 +328,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3" -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array 0.14.6", -] - [[package]] name = "block-buffer" version = "0.10.4" @@ -1168,7 +1159,7 @@ dependencies = [ "rsa", "scrypt", "serde", - "sha-1 0.10.0", + "sha-1", "sha2", "sha3", "signature 1.6.4", @@ -1361,7 +1352,6 @@ dependencies = [ "serde", "tokio", "tokio-rustls", - "tokio-tungstenite", ] [[package]] @@ -1452,7 +1442,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" dependencies = [ - "block-buffer 0.10.4", + "block-buffer", "const-oid", "crypto-common", "subtle", @@ -4107,19 +4097,6 @@ dependencies = [ "v8", ] -[[package]] -name = "sha-1" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - 
"digest 0.9.0", - "opaque-debug", -] - [[package]] name = "sha-1" version = "0.10.0" @@ -4669,7 +4646,7 @@ dependencies = [ "indexmap", "once_cell", "serde", - "sha-1 0.10.0", + "sha-1", "string_enum", "swc_atoms", "swc_common", @@ -4930,7 +4907,6 @@ dependencies = [ "tempfile", "tokio", "tokio-rustls", - "tokio-tungstenite", "url", "winapi", ] @@ -5094,22 +5070,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-tungstenite" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e80b39df6afcc12cdf752398ade96a6b9e99c903dfdc36e53ad10b9c366bca72" -dependencies = [ - "futures-util", - "log", - "rustls", - "tokio", - "tokio-rustls", - "tungstenite", - "webpki", - "webpki-roots", -] - [[package]] name = "tokio-util" version = "0.7.7" @@ -5362,27 +5322,6 @@ dependencies = [ "termcolor", ] -[[package]] -name = "tungstenite" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand", - "rustls", - "sha-1 0.9.8", - "thiserror", - "url", - "utf-8", - "webpki", -] - [[package]] name = "twox-hash" version = "1.6.3" diff --git a/Cargo.toml b/Cargo.toml index 9edd7f8357..0602f2a5f4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -134,7 +134,6 @@ tempfile = "3.4.0" thiserror = "=1.0.38" tokio = { version = "=1.25.0", features = ["full"] } tokio-rustls = "0.23.3" -tokio-tungstenite = "0.16.1" tokio-util = "0.7.4" tower-lsp = { version = "=0.17.0", features = ["proposed"] } url = { version = "2.3.1", features = ["serde", "expose_internals"] } diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 006c73a5f2..82be1d8636 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -24,4 +24,3 @@ hyper = { workspace = true, features = ["backports"] } serde.workspace = true tokio.workspace = true 
tokio-rustls.workspace = true -tokio-tungstenite = { workspace = true, features = ["rustls-tls-webpki-roots"] } diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 943b5d47c7..07cddc85bb 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -52,7 +52,6 @@ use fastwebsockets::OpCode; use fastwebsockets::Role; use fastwebsockets::WebSocket; -pub use tokio_tungstenite; // Re-export tokio_tungstenite mod stream; #[derive(Clone)] diff --git a/test_util/Cargo.toml b/test_util/Cargo.toml index 5934913112..115eea326c 100644 --- a/test_util/Cargo.toml +++ b/test_util/Cargo.toml @@ -41,7 +41,6 @@ tar.workspace = true tempfile.workspace = true tokio.workspace = true tokio-rustls.workspace = true -tokio-tungstenite = { workspace = true, features = ["rustls-tls-webpki-roots"] } url.workspace = true [target.'cfg(unix)'.dependencies] From 1f0360c07382dbd86066d1aa8aa4bae34aff18c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 24 Apr 2023 12:22:21 +0200 Subject: [PATCH 023/320] refactor(ext/node): reorganize ops (#18799) Move all op related code of "ext/node" to "ext/node/ops" module. These files were unnecessarily scattered around the extension. 
--- ext/node/lib.rs | 189 ++++++++++++++-------------- ext/node/{ => ops}/crypto/cipher.rs | 0 ext/node/{ => ops}/crypto/dh.rs | 0 ext/node/{ => ops}/crypto/digest.rs | 0 ext/node/{ => ops}/crypto/mod.rs | 0 ext/node/{ => ops}/crypto/primes.rs | 0 ext/node/{ => ops}/crypto/x509.rs | 0 ext/node/{ => ops}/idna.rs | 0 ext/node/ops/mod.rs | 8 ++ ext/node/{ops.rs => ops/require.rs} | 13 +- ext/node/{ => ops}/v8.rs | 0 ext/node/{ => ops}/winerror.rs | 0 ext/node/{ => ops}/zlib/alloc.rs | 0 ext/node/{ => ops}/zlib/mod.rs | 0 ext/node/{ => ops}/zlib/mode.rs | 0 ext/node/{ => ops}/zlib/stream.rs | 0 16 files changed, 106 insertions(+), 104 deletions(-) rename ext/node/{ => ops}/crypto/cipher.rs (100%) rename ext/node/{ => ops}/crypto/dh.rs (100%) rename ext/node/{ => ops}/crypto/digest.rs (100%) rename ext/node/{ => ops}/crypto/mod.rs (100%) rename ext/node/{ => ops}/crypto/primes.rs (100%) rename ext/node/{ => ops}/crypto/x509.rs (100%) rename ext/node/{ => ops}/idna.rs (100%) create mode 100644 ext/node/ops/mod.rs rename ext/node/{ops.rs => ops/require.rs} (98%) rename ext/node/{ => ops}/v8.rs (100%) rename ext/node/{ => ops}/winerror.rs (100%) rename ext/node/{ => ops}/zlib/alloc.rs (100%) rename ext/node/{ => ops}/zlib/mod.rs (100%) rename ext/node/{ => ops}/zlib/mode.rs (100%) rename ext/node/{ => ops}/zlib/stream.rs (100%) diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 38772d0fc7..2b2ced89ce 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -21,18 +21,13 @@ use std::rc::Rc; use std::sync::Arc; pub mod analyze; -mod crypto; pub mod errors; -mod idna; mod ops; mod package_json; mod path; mod polyfill; mod resolution; mod resolver; -mod v8; -mod winerror; -mod zlib; pub use package_json::PackageJson; pub use path::PathClean; @@ -266,100 +261,100 @@ deno_core::extension!(deno_node, deps = [ deno_io, deno_fs ], parameters = [Env: NodeEnv], ops = [ - crypto::op_node_create_decipheriv, - crypto::op_node_cipheriv_encrypt, - crypto::op_node_cipheriv_final, - 
crypto::op_node_create_cipheriv, - crypto::op_node_create_hash, - crypto::op_node_decipheriv_decrypt, - crypto::op_node_decipheriv_final, - crypto::op_node_hash_update, - crypto::op_node_hash_update_str, - crypto::op_node_hash_digest, - crypto::op_node_hash_digest_hex, - crypto::op_node_hash_clone, - crypto::op_node_private_encrypt, - crypto::op_node_private_decrypt, - crypto::op_node_public_encrypt, - crypto::op_node_check_prime, - crypto::op_node_check_prime_async, - crypto::op_node_check_prime_bytes, - crypto::op_node_check_prime_bytes_async, - crypto::op_node_pbkdf2, - crypto::op_node_pbkdf2_async, - crypto::op_node_hkdf, - crypto::op_node_hkdf_async, - crypto::op_node_generate_secret, - crypto::op_node_generate_secret_async, - crypto::op_node_sign, - crypto::op_node_generate_rsa, - crypto::op_node_generate_rsa_async, - crypto::op_node_dsa_generate, - crypto::op_node_dsa_generate_async, - crypto::op_node_ec_generate, - crypto::op_node_ec_generate_async, - crypto::op_node_ed25519_generate, - crypto::op_node_ed25519_generate_async, - crypto::op_node_x25519_generate, - crypto::op_node_x25519_generate_async, - crypto::op_node_dh_generate_group, - crypto::op_node_dh_generate_group_async, - crypto::op_node_dh_generate, - crypto::op_node_dh_generate_async, - crypto::op_node_verify, - crypto::op_node_random_int, - crypto::op_node_scrypt_sync, - crypto::op_node_scrypt_async, - crypto::x509::op_node_x509_parse, - crypto::x509::op_node_x509_ca, - crypto::x509::op_node_x509_check_email, - crypto::x509::op_node_x509_fingerprint, - crypto::x509::op_node_x509_fingerprint256, - crypto::x509::op_node_x509_fingerprint512, - crypto::x509::op_node_x509_get_issuer, - crypto::x509::op_node_x509_get_subject, - crypto::x509::op_node_x509_get_valid_from, - crypto::x509::op_node_x509_get_valid_to, - crypto::x509::op_node_x509_get_serial_number, - crypto::x509::op_node_x509_key_usage, - winerror::op_node_sys_to_uv_error, - v8::op_v8_cached_data_version_tag, - 
v8::op_v8_get_heap_statistics, - idna::op_node_idna_domain_to_ascii, - idna::op_node_idna_domain_to_unicode, - idna::op_node_idna_punycode_decode, - idna::op_node_idna_punycode_encode, - zlib::op_zlib_new, - zlib::op_zlib_close, - zlib::op_zlib_close_if_pending, - zlib::op_zlib_write, - zlib::op_zlib_write_async, - zlib::op_zlib_init, - zlib::op_zlib_reset, + ops::crypto::op_node_create_decipheriv, + ops::crypto::op_node_cipheriv_encrypt, + ops::crypto::op_node_cipheriv_final, + ops::crypto::op_node_create_cipheriv, + ops::crypto::op_node_create_hash, + ops::crypto::op_node_decipheriv_decrypt, + ops::crypto::op_node_decipheriv_final, + ops::crypto::op_node_hash_update, + ops::crypto::op_node_hash_update_str, + ops::crypto::op_node_hash_digest, + ops::crypto::op_node_hash_digest_hex, + ops::crypto::op_node_hash_clone, + ops::crypto::op_node_private_encrypt, + ops::crypto::op_node_private_decrypt, + ops::crypto::op_node_public_encrypt, + ops::crypto::op_node_check_prime, + ops::crypto::op_node_check_prime_async, + ops::crypto::op_node_check_prime_bytes, + ops::crypto::op_node_check_prime_bytes_async, + ops::crypto::op_node_pbkdf2, + ops::crypto::op_node_pbkdf2_async, + ops::crypto::op_node_hkdf, + ops::crypto::op_node_hkdf_async, + ops::crypto::op_node_generate_secret, + ops::crypto::op_node_generate_secret_async, + ops::crypto::op_node_sign, + ops::crypto::op_node_generate_rsa, + ops::crypto::op_node_generate_rsa_async, + ops::crypto::op_node_dsa_generate, + ops::crypto::op_node_dsa_generate_async, + ops::crypto::op_node_ec_generate, + ops::crypto::op_node_ec_generate_async, + ops::crypto::op_node_ed25519_generate, + ops::crypto::op_node_ed25519_generate_async, + ops::crypto::op_node_x25519_generate, + ops::crypto::op_node_x25519_generate_async, + ops::crypto::op_node_dh_generate_group, + ops::crypto::op_node_dh_generate_group_async, + ops::crypto::op_node_dh_generate, + ops::crypto::op_node_dh_generate_async, + ops::crypto::op_node_verify, + 
ops::crypto::op_node_random_int, + ops::crypto::op_node_scrypt_sync, + ops::crypto::op_node_scrypt_async, + ops::crypto::x509::op_node_x509_parse, + ops::crypto::x509::op_node_x509_ca, + ops::crypto::x509::op_node_x509_check_email, + ops::crypto::x509::op_node_x509_fingerprint, + ops::crypto::x509::op_node_x509_fingerprint256, + ops::crypto::x509::op_node_x509_fingerprint512, + ops::crypto::x509::op_node_x509_get_issuer, + ops::crypto::x509::op_node_x509_get_subject, + ops::crypto::x509::op_node_x509_get_valid_from, + ops::crypto::x509::op_node_x509_get_valid_to, + ops::crypto::x509::op_node_x509_get_serial_number, + ops::crypto::x509::op_node_x509_key_usage, + ops::winerror::op_node_sys_to_uv_error, + ops::v8::op_v8_cached_data_version_tag, + ops::v8::op_v8_get_heap_statistics, + ops::idna::op_node_idna_domain_to_ascii, + ops::idna::op_node_idna_domain_to_unicode, + ops::idna::op_node_idna_punycode_decode, + ops::idna::op_node_idna_punycode_encode, + ops::zlib::op_zlib_new, + ops::zlib::op_zlib_close, + ops::zlib::op_zlib_close_if_pending, + ops::zlib::op_zlib_write, + ops::zlib::op_zlib_write_async, + ops::zlib::op_zlib_init, + ops::zlib::op_zlib_reset, op_node_build_os, - ops::op_require_init_paths, - ops::op_require_node_module_paths, - ops::op_require_proxy_path, - ops::op_require_is_deno_dir_package, - ops::op_require_resolve_deno_dir, - ops::op_require_is_request_relative, - ops::op_require_resolve_lookup_paths, - ops::op_require_try_self_parent_path, - ops::op_require_try_self, - ops::op_require_real_path, - ops::op_require_path_is_absolute, - ops::op_require_path_dirname, - ops::op_require_stat, - ops::op_require_path_resolve, - ops::op_require_path_basename, - ops::op_require_read_file, - ops::op_require_as_file_path, - ops::op_require_resolve_exports, - ops::op_require_read_closest_package_json, - ops::op_require_read_package_scope, - ops::op_require_package_imports_resolve, - ops::op_require_break_on_next_statement, + 
ops::require::op_require_init_paths, + ops::require::op_require_node_module_paths, + ops::require::op_require_proxy_path, + ops::require::op_require_is_deno_dir_package, + ops::require::op_require_resolve_deno_dir, + ops::require::op_require_is_request_relative, + ops::require::op_require_resolve_lookup_paths, + ops::require::op_require_try_self_parent_path, + ops::require::op_require_try_self, + ops::require::op_require_real_path, + ops::require::op_require_path_is_absolute, + ops::require::op_require_path_dirname, + ops::require::op_require_stat, + ops::require::op_require_path_resolve, + ops::require::op_require_path_basename, + ops::require::op_require_read_file, + ops::require::op_require_as_file_path, + ops::require::op_require_resolve_exports, + ops::require::op_require_read_closest_package_json, + ops::require::op_require_read_package_scope, + ops::require::op_require_package_imports_resolve, + ops::require::op_require_break_on_next_statement, ], esm_entry_point = "ext:deno_node/02_init.js", esm = [ diff --git a/ext/node/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs similarity index 100% rename from ext/node/crypto/cipher.rs rename to ext/node/ops/crypto/cipher.rs diff --git a/ext/node/crypto/dh.rs b/ext/node/ops/crypto/dh.rs similarity index 100% rename from ext/node/crypto/dh.rs rename to ext/node/ops/crypto/dh.rs diff --git a/ext/node/crypto/digest.rs b/ext/node/ops/crypto/digest.rs similarity index 100% rename from ext/node/crypto/digest.rs rename to ext/node/ops/crypto/digest.rs diff --git a/ext/node/crypto/mod.rs b/ext/node/ops/crypto/mod.rs similarity index 100% rename from ext/node/crypto/mod.rs rename to ext/node/ops/crypto/mod.rs diff --git a/ext/node/crypto/primes.rs b/ext/node/ops/crypto/primes.rs similarity index 100% rename from ext/node/crypto/primes.rs rename to ext/node/ops/crypto/primes.rs diff --git a/ext/node/crypto/x509.rs b/ext/node/ops/crypto/x509.rs similarity index 100% rename from ext/node/crypto/x509.rs rename to 
ext/node/ops/crypto/x509.rs diff --git a/ext/node/idna.rs b/ext/node/ops/idna.rs similarity index 100% rename from ext/node/idna.rs rename to ext/node/ops/idna.rs diff --git a/ext/node/ops/mod.rs b/ext/node/ops/mod.rs new file mode 100644 index 0000000000..6bab57fb8f --- /dev/null +++ b/ext/node/ops/mod.rs @@ -0,0 +1,8 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +pub mod crypto; +pub mod idna; +pub mod require; +pub mod v8; +pub mod winerror; +pub mod zlib; diff --git a/ext/node/ops.rs b/ext/node/ops/require.rs similarity index 98% rename from ext/node/ops.rs rename to ext/node/ops/require.rs index 662168acc2..82a0433400 100644 --- a/ext/node/ops.rs +++ b/ext/node/ops/require.rs @@ -14,15 +14,14 @@ use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use crate::resolution; use crate::NodeEnv; use crate::NodeFs; - -use super::resolution; -use super::NodeModuleKind; -use super::NodePermissions; -use super::NodeResolutionMode; -use super::NpmResolver; -use super::PackageJson; +use crate::NodeModuleKind; +use crate::NodePermissions; +use crate::NodeResolutionMode; +use crate::NpmResolver; +use crate::PackageJson; fn ensure_read_permission

( state: &mut OpState, diff --git a/ext/node/v8.rs b/ext/node/ops/v8.rs similarity index 100% rename from ext/node/v8.rs rename to ext/node/ops/v8.rs diff --git a/ext/node/winerror.rs b/ext/node/ops/winerror.rs similarity index 100% rename from ext/node/winerror.rs rename to ext/node/ops/winerror.rs diff --git a/ext/node/zlib/alloc.rs b/ext/node/ops/zlib/alloc.rs similarity index 100% rename from ext/node/zlib/alloc.rs rename to ext/node/ops/zlib/alloc.rs diff --git a/ext/node/zlib/mod.rs b/ext/node/ops/zlib/mod.rs similarity index 100% rename from ext/node/zlib/mod.rs rename to ext/node/ops/zlib/mod.rs diff --git a/ext/node/zlib/mode.rs b/ext/node/ops/zlib/mode.rs similarity index 100% rename from ext/node/zlib/mode.rs rename to ext/node/ops/zlib/mode.rs diff --git a/ext/node/zlib/stream.rs b/ext/node/ops/zlib/stream.rs similarity index 100% rename from ext/node/zlib/stream.rs rename to ext/node/ops/zlib/stream.rs From 652694f15d79851bcb02640f18c204efcbd04d17 Mon Sep 17 00:00:00 2001 From: Tristan F Date: Mon, 24 Apr 2023 07:03:53 -0400 Subject: [PATCH 024/320] docs(security): clarify storage explosion attacks in policy (#18697) Deno does not cover storage explosion attacks from evaluated runtime code. I've chosen the following parts for this clarification: - _Evaluated_ code - storage explosion attacks caused by services in Deno such as the HTTP server should still be covered. - Isolated - If the storage explosion attack can happen at arbitrary different files, it may leave a much more lasting impact on a targeted host system than on simply the Deno cache. --- .github/SECURITY.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/SECURITY.md b/.github/SECURITY.md index c5680b2b01..ea7a0af3d3 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -48,6 +48,10 @@ may change slightly over time, but in general the model is as follows: that a value set in one web worker can not be accessed by another. 
- All runtime I/O is considered to be privileged and must always be guarded by a runtime permission. This includes filesystem access, network access, etc. + - The only exception to this is runtime storage explosion attacks that are + isolated to a part of the file system, caused by evaluated code (for + example, caching big dependencies or no limits on runtime caches such as the + [Web Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache) API). - Users should not be able to self-escalate their permissions without explicit consent. - I/O required to build an initial static module graph should always follow the From ebfc75fe8cfb8af1a6f5a1a021d8fd9f235bdd8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 24 Apr 2023 15:55:06 +0200 Subject: [PATCH 025/320] test: disable flaky Node test (#18821) If we hit a slow runner this test almost always fails. --- cli/tests/node_compat/config.jsonc | 6 ++- .../test-child-process-exec-timeout-kill.js | 50 ------------------- 2 files changed, 4 insertions(+), 52 deletions(-) delete mode 100644 cli/tests/node_compat/test/parallel/test-child-process-exec-timeout-kill.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index ce1cf3a08c..fd7703d6b8 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -32,7 +32,8 @@ "test-child-process-exec-std-encoding.js", // TODO(bartlomieju): this test was flaky on macOS CI // "test-child-process-exec-timeout-expire.js", - "test-child-process-exec-timeout-kill.js", + // TODO(bartlomieju): this test was flaky on macOS CI + // "test-child-process-exec-timeout-kill.js", "test-child-process-exec-timeout-not-expired.js", "test-child-process-execFile-promisified-abortController.js", "test-child-process-execfile.js", @@ -212,7 +213,8 @@ "test-child-process-exec-stdout-stderr-data-string.js", // TODO(bartlomieju): this test was flaky on macOS CI // "test-child-process-exec-timeout-expire.js", - 
"test-child-process-exec-timeout-kill.js", + // TODO(bartlomieju): this test was flaky on macOS CI + // "test-child-process-exec-timeout-kill.js", // TODO(bartlomieju): this test was flaky on macOS CI // "test-child-process-execFile-promisified-abortController.js", "test-child-process-execfile-maxbuf.js", diff --git a/cli/tests/node_compat/test/parallel/test-child-process-exec-timeout-kill.js b/cli/tests/node_compat/test/parallel/test-child-process-exec-timeout-kill.js deleted file mode 100644 index fd4884fc5f..0000000000 --- a/cli/tests/node_compat/test/parallel/test-child-process-exec-timeout-kill.js +++ /dev/null @@ -1,50 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -// TODO(PolarETech): The process.argv[3] check should be argv[2], and the -// command passed to exec() should not need to include "run", "-A", -// and "require.ts". - -'use strict'; - -// Test exec() with both a timeout and a killSignal. - -const common = require('../common'); -const assert = require('assert'); -const cp = require('child_process'); - -const { - cleanupStaleProcess, - logInTimeout, - kExpiringChildRunTime, - kExpiringParentTimer, -} = require('../common/child_process'); - -if (process.argv[3] === 'child') { - logInTimeout(kExpiringChildRunTime); - return; -} - -const cmd = `"${process.execPath}" run -A require.ts "${__filename}" child`; - -// Test with a different kill signal. 
-cp.exec(cmd, { - timeout: kExpiringParentTimer, - killSignal: 'SIGKILL' -}, common.mustCall((err, stdout, stderr) => { - console.log('[stdout]', stdout.trim()); - console.log('[stderr]', stderr.trim()); - - assert.strictEqual(err.killed, true); - assert.strictEqual(err.code, null); - assert.strictEqual(err.signal, 'SIGKILL'); - assert.strictEqual(err.cmd, cmd); - assert.strictEqual(stdout.trim(), ''); - assert.strictEqual(stderr.trim(), ''); -})); - -cleanupStaleProcess(__filename); From be9e3c430fdd58cfdd36cb9d6eba076e1b74d2b7 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 24 Apr 2023 22:24:18 +0200 Subject: [PATCH 026/320] docs: add categories to APIs without (#18826) --- cli/tsc/dts/lib.deno.unstable.d.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index 9c4bd5d2cf..c11bfb09e7 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -97,6 +97,8 @@ declare namespace Deno { /** **UNSTABLE**: New API, yet to be vetted. * * The native struct type for interfacing with foreign functions. + * + * @category FFI */ type NativeStructType = { readonly struct: readonly NativeType[] }; @@ -351,7 +353,9 @@ declare namespace Deno { : StaticForeignSymbol; }; + /** @category FFI */ const brand: unique symbol; + /** @category FFI */ type PointerObject = { [brand]: unknown }; /** **UNSTABLE**: New API, yet to be vetted. @@ -643,8 +647,11 @@ declare namespace Deno { /** * This magic code used to implement better type hints for {@linkcode Deno.dlopen} + * + * @category FFI */ type Cast = A extends B ? A : B; + /** @category FFI */ type Const = Cast< T, | (T extends string | number | bigint | boolean ? T : never) @@ -1770,6 +1777,7 @@ declare namespace Deno { batchSize?: number; } + /** @category KV */ export interface KvCommitResult { /** The versionstamp of the value committed to KV. 
*/ versionstamp: string; From 0e97fa4d5f056e12d3c0704bfb7bcdc56316ef94 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 24 Apr 2023 17:08:11 -0400 Subject: [PATCH 027/320] fix(npm): only include top level packages in top level node_modules directory (#18824) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We were indeterministically including packages in the top level `node_modules/` folder when using a local node_modules directory. This change aligns with pnpm and only includes top level packages in this folder. This should be faster for initializing the folder, but may expose issues in packages that reference other packages not defined in their dependencies. That said, the behaviour previously was previously broken. This has exposed a bug in the require implementation where it doesn't find a package (which is the main underlying issue here). There is a failing test already for this in the test suite after this change. Closes #18822 --------- Co-authored-by: Bartek Iwańczuk --- cli/npm/resolvers/local.rs | 14 ++++---------- ext/node/polyfills/01_require.js | 28 ++++++++++++++++++---------- 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index d4085f345e..a44afc9359 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -4,7 +4,6 @@ use std::borrow::Cow; use std::collections::HashSet; -use std::collections::VecDeque; use std::fs; use std::path::Path; use std::path::PathBuf; @@ -365,18 +364,16 @@ async fn sync_resolution_with_fs( } } - // 4. Create all the packages in the node_modules folder, which are symlinks. + // 4. Create all the top level packages in the node_modules folder, which are symlinks. 
// // Symlink node_modules/ to // node_modules/.deno//node_modules/ let mut found_names = HashSet::new(); - let mut pending_packages = VecDeque::new(); - pending_packages.extend(snapshot.top_level_packages().map(|id| (id, true))); - while let Some((id, is_top_level)) = pending_packages.pop_front() { + let mut ids = snapshot.top_level_packages().collect::>(); + ids.sort_by(|a, b| b.cmp(a)); // create determinism and only include the latest version + for id in ids { let root_folder_name = if found_names.insert(id.nv.name.clone()) { id.nv.name.clone() - } else if is_top_level { - id.nv.to_string() } else { continue; // skip, already handled }; @@ -394,9 +391,6 @@ async fn sync_resolution_with_fs( &local_registry_package_path, &join_package_name(root_node_modules_dir_path, &root_folder_name), )?; - for id in package.dependencies.values() { - pending_packages.push_back((id, false)); - } } drop(single_process_lock); diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 42ead05e32..8fbe5078cf 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -557,15 +557,21 @@ Module._findPath = function (request, paths, isMain, parentPath) { } } - const isDenoDirPackage = ops.op_require_is_deno_dir_package( - curPath, - ); - const isRelative = ops.op_require_is_request_relative( - request, - ); - const basePath = (isDenoDirPackage && !isRelative) - ? pathResolve(curPath, packageSpecifierSubPath(request)) - : pathResolve(curPath, request); + let basePath; + + if (usesLocalNodeModulesDir) { + basePath = pathResolve(curPath, request); + } else { + const isDenoDirPackage = ops.op_require_is_deno_dir_package( + curPath, + ); + const isRelative = ops.op_require_is_request_relative( + request, + ); + basePath = (isDenoDirPackage && !isRelative) + ? 
pathResolve(curPath, packageSpecifierSubPath(request)) + : pathResolve(curPath, request); + } let filename; const rc = stat(basePath); @@ -615,7 +621,9 @@ Module._resolveLookupPaths = function (request, parent) { return paths; } - if (parent?.filename && parent.filename.length > 0) { + if ( + !usesLocalNodeModulesDir && parent?.filename && parent.filename.length > 0 + ) { const denoDirPath = ops.op_require_resolve_deno_dir( request, parent.filename, From bb74e75a049768c2949aa08de6752a16813b97de Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Mon, 24 Apr 2023 23:24:40 +0200 Subject: [PATCH 028/320] feat(ext/http): h2c for http/2 (#18817) This implements HTTP/2 prior-knowledge connections, allowing clients to request HTTP/2 over plaintext or TLS-without-ALPN connections. If a client requests a specific protocol via ALPN (`h2` or `http/1.1`), however, the protocol is forced and must be used. --- cli/tests/unit/serve_test.ts | 78 ++++++++ ext/http/http_next.rs | 107 ++++++++--- ext/http/lib.rs | 70 +++++-- ext/http/network_buffered_stream.rs | 284 ++++++++++++++++++++++++++++ ext/net/raw.rs | 19 ++ 5 files changed, 520 insertions(+), 38 deletions(-) create mode 100644 ext/http/network_buffered_stream.rs diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 9268c7aab8..55b7c4590a 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -15,6 +15,7 @@ import { deferred, fail, } from "./test_util.ts"; +import { consoleSize } from "../../../runtime/js/40_tty.js"; function createOnErrorCb(ac: AbortController): (err: unknown) => Response { return (err) => { @@ -2709,3 +2710,80 @@ function isProhibitedForTrailer(key: string): boolean { const s = new Set(["transfer-encoding", "content-length", "trailer"]); return s.has(key.toLowerCase()); } + +Deno.test( + { permissions: { net: true, run: true } }, + async function httpServeCurlH2C() { + const ac = new AbortController(); + const server = Deno.serve( + () => new 
Response("hello world!"), + { signal: ac.signal }, + ); + + assertEquals( + "hello world!", + await curlRequest(["http://localhost:8000/path"]), + ); + assertEquals( + "hello world!", + await curlRequest(["http://localhost:8000/path", "--http2"]), + ); + assertEquals( + "hello world!", + await curlRequest([ + "http://localhost:8000/path", + "--http2", + "--http2-prior-knowledge", + ]), + ); + + ac.abort(); + await server; + }, +); + +Deno.test( + { permissions: { net: true, run: true, read: true } }, + async function httpsServeCurlH2C() { + const ac = new AbortController(); + const server = Deno.serve( + () => new Response("hello world!"), + { + signal: ac.signal, + cert: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.crt"), + key: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.key"), + }, + ); + + assertEquals( + "hello world!", + await curlRequest(["https://localhost:9000/path", "-k"]), + ); + assertEquals( + "hello world!", + await curlRequest(["https://localhost:9000/path", "-k", "--http2"]), + ); + assertEquals( + "hello world!", + await curlRequest([ + "https://localhost:9000/path", + "-k", + "--http2", + "--http2-prior-knowledge", + ]), + ); + + ac.abort(); + await server; + }, +); + +async function curlRequest(args: string[]) { + const { success, stdout } = await new Deno.Command("curl", { + args, + stdout: "piped", + stderr: "null", + }).output(); + assert(success); + return new TextDecoder().decode(stdout); +} diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 47888f0a49..71f2a32b68 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
use crate::extract_network_stream; +use crate::network_buffered_stream::NetworkStreamPrefixCheck; use crate::request_body::HttpRequestBody; use crate::request_properties::DefaultHttpRequestProperties; use crate::request_properties::HttpConnectionProperties; @@ -36,6 +37,7 @@ use hyper1::http::HeaderValue; use hyper1::server::conn::http1; use hyper1::server::conn::http2; use hyper1::service::service_fn; +use hyper1::service::HttpService; use hyper1::upgrade::OnUpgrade; use hyper1::StatusCode; use pin_project::pin_project; @@ -56,6 +58,37 @@ use tokio::task::JoinHandle; type Request = hyper1::Request; type Response = hyper1::Response; +/// All HTTP/2 connections start with this byte string. +/// +/// In HTTP/2, each endpoint is required to send a connection preface as a final confirmation +/// of the protocol in use and to establish the initial settings for the HTTP/2 connection. The +/// client and server each send a different connection preface. +/// +/// The client connection preface starts with a sequence of 24 octets, which in hex notation is: +/// +/// 0x505249202a20485454502f322e300d0a0d0a534d0d0a0d0a +/// +/// That is, the connection preface starts with the string PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n). This sequence +/// MUST be followed by a SETTINGS frame (Section 6.5), which MAY be empty. +const HTTP2_PREFIX: &[u8] = b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"; + +/// ALPN negotation for "h2" +const TLS_ALPN_HTTP_2: &[u8] = b"h2"; + +/// ALPN negotation for "http/1.1" +const TLS_ALPN_HTTP_11: &[u8] = b"http/1.1"; + +/// Name a trait for streams we can serve HTTP over. 
+trait HttpServeStream: + tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static +{ +} +impl< + S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, + > HttpServeStream for S +{ +} + pub struct HttpSlabRecord { request_info: HttpConnectionProperties, request_parts: Parts, @@ -514,6 +547,44 @@ impl> Future for SlabFuture { } } +fn serve_http11_unconditional( + io: impl HttpServeStream, + svc: impl HttpService + 'static, + cancel: RcRef, +) -> impl Future> + 'static { + let conn = http1::Builder::new() + .keep_alive(true) + .serve_connection(io, svc); + + conn + .with_upgrades() + .map_err(AnyError::from) + .try_or_cancel(cancel) +} + +fn serve_http2_unconditional( + io: impl HttpServeStream, + svc: impl HttpService + 'static, + cancel: RcRef, +) -> impl Future> + 'static { + let conn = http2::Builder::new(LocalExecutor).serve_connection(io, svc); + conn.map_err(AnyError::from).try_or_cancel(cancel) +} + +async fn serve_http2_autodetect( + io: impl HttpServeStream, + svc: impl HttpService + 'static, + cancel: RcRef, +) -> Result<(), AnyError> { + let prefix = NetworkStreamPrefixCheck::new(io, HTTP2_PREFIX); + let (matches, io) = prefix.match_prefix().await?; + if matches { + serve_http2_unconditional(io, svc, cancel).await + } else { + serve_http11_unconditional(io, svc, cancel).await + } +} + fn serve_https( mut io: TlsStream, request_info: HttpConnectionProperties, @@ -526,28 +597,21 @@ fn serve_https( }); spawn_local(async { io.handshake().await?; + // If the client specifically negotiates a protocol, we will use it. 
If not, we'll auto-detect + // based on the prefix bytes let handshake = io.get_ref().1.alpn_protocol(); - // h2 - if handshake == Some(&[104, 50]) { - let conn = http2::Builder::new(LocalExecutor).serve_connection(io, svc); - - conn.map_err(AnyError::from).try_or_cancel(cancel).await + if handshake == Some(TLS_ALPN_HTTP_2) { + serve_http2_unconditional(io, svc, cancel).await + } else if handshake == Some(TLS_ALPN_HTTP_11) { + serve_http11_unconditional(io, svc, cancel).await } else { - let conn = http1::Builder::new() - .keep_alive(true) - .serve_connection(io, svc); - - conn - .with_upgrades() - .map_err(AnyError::from) - .try_or_cancel(cancel) - .await + serve_http2_autodetect(io, svc, cancel).await } }) } fn serve_http( - io: impl tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static, + io: impl HttpServeStream, request_info: HttpConnectionProperties, cancel: RcRef, tx: tokio::sync::mpsc::Sender, @@ -556,16 +620,7 @@ fn serve_http( let svc = service_fn(move |req: Request| { new_slab_future(req, request_info.clone(), tx.clone()) }); - spawn_local(async { - let conn = http1::Builder::new() - .keep_alive(true) - .serve_connection(io, svc); - conn - .with_upgrades() - .map_err(AnyError::from) - .try_or_cancel(cancel) - .await - }) + spawn_local(serve_http2_autodetect(io, svc, cancel)) } fn serve_http_on( @@ -702,7 +757,7 @@ pub fn op_serve_http_on( AsyncRefCell::new(rx), )); - let handle = serve_http_on( + let handle: JoinHandle> = serve_http_on( network_stream, &listen_properties, resource.cancel_handle(), diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 561b13885d..d5404d189a 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -73,11 +73,13 @@ use tokio::io::AsyncWriteExt; use tokio::task::spawn_local; use websocket_upgrade::WebSocketUpgrade; +use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; use crate::reader_stream::ShutdownHandle; pub mod compressible; mod http_next; +mod 
network_buffered_stream; mod reader_stream; mod request_body; mod request_properties; @@ -1251,22 +1253,66 @@ impl CanDowncastUpgrade for hyper::upgrade::Upgraded { } } +fn maybe_extract_network_stream< + T: Into + AsyncRead + AsyncWrite + Unpin + 'static, + U: CanDowncastUpgrade, +>( + upgraded: U, +) -> Result<(NetworkStream, Bytes), U> { + let upgraded = match upgraded.downcast::() { + Ok((stream, bytes)) => return Ok((stream.into(), bytes)), + Err(x) => x, + }; + + match upgraded.downcast::>() { + Ok((stream, upgraded_bytes)) => { + // Both the upgrade and the stream might have unread bytes + let (io, stream_bytes) = stream.into_inner(); + let bytes = match (stream_bytes.is_empty(), upgraded_bytes.is_empty()) { + (false, false) => Bytes::default(), + (true, false) => upgraded_bytes, + (false, true) => stream_bytes, + (true, true) => { + // The upgraded bytes come first as they have already been read + let mut v = upgraded_bytes.to_vec(); + v.append(&mut stream_bytes.to_vec()); + Bytes::from(v) + } + }; + Ok((io.into(), bytes)) + } + Err(x) => Err(x), + } +} + fn extract_network_stream( upgraded: U, ) -> (NetworkStream, Bytes) { - let upgraded = match upgraded.downcast::() { - Ok((stream, bytes)) => return (NetworkStream::Tcp(stream), bytes), - Err(x) => x, - }; - let upgraded = match upgraded.downcast::() { - Ok((stream, bytes)) => return (NetworkStream::Tls(stream), bytes), - Err(x) => x, - }; + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => return res, + Err(x) => x, + }; + let upgraded = + match maybe_extract_network_stream::( + upgraded, + ) { + Ok(res) => return res, + Err(x) => x, + }; #[cfg(unix)] - let upgraded = match upgraded.downcast::() { - Ok((stream, bytes)) => return (NetworkStream::Unix(stream), bytes), - Err(x) => x, - }; + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => return res, + Err(x) => x, + }; + let upgraded = + match maybe_extract_network_stream::(upgraded) { + Ok(res) => 
return res, + Err(x) => x, + }; + + // TODO(mmastrac): HTTP/2 websockets may yield an un-downgradable type drop(upgraded); unreachable!("unexpected stream type"); } diff --git a/ext/http/network_buffered_stream.rs b/ext/http/network_buffered_stream.rs new file mode 100644 index 0000000000..e4b2ee895d --- /dev/null +++ b/ext/http/network_buffered_stream.rs @@ -0,0 +1,284 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use bytes::Bytes; +use deno_core::futures::future::poll_fn; +use deno_core::futures::ready; +use std::io; +use std::mem::MaybeUninit; +use std::pin::Pin; +use std::task::Poll; +use tokio::io::AsyncRead; +use tokio::io::AsyncWrite; +use tokio::io::ReadBuf; + +const MAX_PREFIX_SIZE: usize = 256; + +pub struct NetworkStreamPrefixCheck { + io: S, + prefix: &'static [u8], + buffer: [MaybeUninit; MAX_PREFIX_SIZE * 2], +} + +impl NetworkStreamPrefixCheck { + pub fn new(io: S, prefix: &'static [u8]) -> Self { + debug_assert!(prefix.len() < MAX_PREFIX_SIZE); + Self { + io, + prefix, + buffer: [MaybeUninit::::uninit(); MAX_PREFIX_SIZE * 2], + } + } + + // Returns a [`NetworkBufferedStream`], rewound with the bytes we read to determine what + // type of stream this is. 
+ pub async fn match_prefix( + self, + ) -> io::Result<(bool, NetworkBufferedStream)> { + let mut buffer = self.buffer; + let mut readbuf = ReadBuf::uninit(&mut buffer); + let mut io = self.io; + let prefix = self.prefix; + loop { + enum State { + Unknown, + Matched, + NotMatched, + } + + let state = poll_fn(|cx| { + let filled_len = readbuf.filled().len(); + let res = ready!(Pin::new(&mut io).poll_read(cx, &mut readbuf)); + if let Err(e) = res { + return Poll::Ready(Err(e)); + } + let filled = readbuf.filled(); + let new_len = filled.len(); + if new_len == filled_len { + // Empty read, no match + return Poll::Ready(Ok(State::NotMatched)); + } else if new_len < prefix.len() { + // Read less than prefix, make sure we're still matching the prefix (early exit) + if !prefix.starts_with(filled) { + return Poll::Ready(Ok(State::NotMatched)); + } + } else if new_len >= prefix.len() { + // We have enough to determine + if filled.starts_with(prefix) { + return Poll::Ready(Ok(State::Matched)); + } else { + return Poll::Ready(Ok(State::NotMatched)); + } + } + + Poll::Ready(Ok(State::Unknown)) + }) + .await?; + + match state { + State::Unknown => continue, + State::Matched => { + let initialized_len = readbuf.filled().len(); + return Ok(( + true, + NetworkBufferedStream::new(io, buffer, initialized_len), + )); + } + State::NotMatched => { + let initialized_len = readbuf.filled().len(); + return Ok(( + false, + NetworkBufferedStream::new(io, buffer, initialized_len), + )); + } + } + } + } +} + +pub struct NetworkBufferedStream { + io: S, + initialized_len: usize, + prefix_offset: usize, + prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], + prefix_read: bool, +} + +impl NetworkBufferedStream { + fn new( + io: S, + prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], + initialized_len: usize, + ) -> Self { + Self { + io, + initialized_len, + prefix_offset: 0, + prefix, + prefix_read: false, + } + } + + fn current_slice(&self) -> &[u8] { + // We trust that these bytes are initialized properly + 
let slice = &self.prefix[self.prefix_offset..self.initialized_len]; + + // This guarantee comes from slice_assume_init_ref (we can't use that until it's stable) + + // SAFETY: casting `slice` to a `*const [T]` is safe since the caller guarantees that + // `slice` is initialized, and `MaybeUninit` is guaranteed to have the same layout as `T`. + // The pointer obtained is valid since it refers to memory owned by `slice` which is a + // reference and thus guaranteed to be valid for reads. + + unsafe { &*(slice as *const [_] as *const [u8]) as _ } + } + + pub fn into_inner(self) -> (S, Bytes) { + let bytes = Bytes::copy_from_slice(self.current_slice()); + (self.io, bytes) + } +} + +impl AsyncRead for NetworkBufferedStream { + // From hyper's Rewind (https://github.com/hyperium/hyper), MIT License, Copyright (c) Sean McArthur + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if !self.prefix_read { + let prefix = self.current_slice(); + + // If there are no remaining bytes, let the bytes get dropped. 
+ if !prefix.is_empty() { + let copy_len = std::cmp::min(prefix.len(), buf.remaining()); + buf.put_slice(&prefix[..copy_len]); + self.prefix_offset += copy_len; + + return Poll::Ready(Ok(())); + } else { + self.prefix_read = true; + } + } + Pin::new(&mut self.io).poll_read(cx, buf) + } +} + +impl AsyncWrite + for NetworkBufferedStream +{ + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_write(cx, buf) + } + + fn poll_flush( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_flush(cx) + } + + fn poll_shutdown( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_shutdown(cx) + } + + fn is_write_vectored(&self) -> bool { + self.io.is_write_vectored() + } + + fn poll_write_vectored( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + Pin::new(&mut self.io).poll_write_vectored(cx, bufs) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tokio::io::AsyncReadExt; + + struct YieldsOneByteAtATime(&'static [u8]); + + impl AsyncRead for YieldsOneByteAtATime { + fn poll_read( + mut self: Pin<&mut Self>, + _cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if let Some((head, tail)) = self.as_mut().0.split_first() { + self.as_mut().0 = tail; + let dest = buf.initialize_unfilled_to(1); + dest[0] = *head; + buf.advance(1); + } + Poll::Ready(Ok(())) + } + } + + async fn test( + io: impl AsyncRead + Unpin, + prefix: &'static [u8], + expect_match: bool, + expect_string: &'static str, + ) -> io::Result<()> { + let (matches, mut io) = NetworkStreamPrefixCheck::new(io, prefix) + .match_prefix() + .await?; + assert_eq!(matches, expect_match); + let mut s = String::new(); + Pin::new(&mut io).read_to_string(&mut s).await?; + assert_eq!(s, expect_string); + 
Ok(()) + } + + #[tokio::test] + async fn matches_prefix_simple() -> io::Result<()> { + let buf = b"prefix match".as_slice(); + test(buf, b"prefix", true, "prefix match").await + } + + #[tokio::test] + async fn matches_prefix_exact() -> io::Result<()> { + let buf = b"prefix".as_slice(); + test(buf, b"prefix", true, "prefix").await + } + + #[tokio::test] + async fn not_matches_prefix_simple() -> io::Result<()> { + let buf = b"prefill match".as_slice(); + test(buf, b"prefix", false, "prefill match").await + } + + #[tokio::test] + async fn not_matches_prefix_short() -> io::Result<()> { + let buf = b"nope".as_slice(); + test(buf, b"prefix", false, "nope").await + } + + #[tokio::test] + async fn not_matches_prefix_empty() -> io::Result<()> { + let buf = b"".as_slice(); + test(buf, b"prefix", false, "").await + } + + #[tokio::test] + async fn matches_one_byte_at_a_time() -> io::Result<()> { + let buf = YieldsOneByteAtATime(b"prefix"); + test(buf, b"prefix", true, "prefix").await + } + + #[tokio::test] + async fn not_matches_one_byte_at_a_time() -> io::Result<()> { + let buf = YieldsOneByteAtATime(b"prefill"); + test(buf, b"prefix", false, "prefill").await + } +} diff --git a/ext/net/raw.rs b/ext/net/raw.rs index 74cc10d630..3b50af41e0 100644 --- a/ext/net/raw.rs +++ b/ext/net/raw.rs @@ -30,6 +30,25 @@ pub enum NetworkStream { Unix(#[pin] UnixStream), } +impl From for NetworkStream { + fn from(value: TcpStream) -> Self { + NetworkStream::Tcp(value) + } +} + +impl From for NetworkStream { + fn from(value: TlsStream) -> Self { + NetworkStream::Tls(value) + } +} + +#[cfg(unix)] +impl From for NetworkStream { + fn from(value: UnixStream) -> Self { + NetworkStream::Unix(value) + } +} + /// A raw stream of one of the types handled by this extension. 
#[derive(Copy, Clone, PartialEq, Eq)] pub enum NetworkStreamType { From aa286fdecb15461ef8ddd4c372f5a13e01e1cb7b Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 24 Apr 2023 19:44:35 -0400 Subject: [PATCH 029/320] refactor(ext/node): allow injecting `NodeFs` from CLI (#18829) This allows providing a `NodeFs` as part of the `WorkerOptions`. --- cli/build.rs | 6 +- cli/lsp/documents.rs | 13 +- cli/lsp/language_server.rs | 10 +- cli/module_loader.rs | 13 +- cli/node.rs | 7 +- cli/npm/resolvers/local.rs | 11 +- cli/npm/resolvers/mod.rs | 3 + cli/proc_state.rs | 14 +- cli/standalone/mod.rs | 2 + cli/tools/check.rs | 6 +- cli/tools/task.rs | 8 +- cli/tsc/mod.rs | 19 +- cli/worker.rs | 21 +- ext/node/analyze.rs | 110 +- ext/node/lib.rs | 112 +- ext/node/ops/require.rs | 138 +- ext/node/package_json.rs | 10 +- ext/node/resolution.rs | 2113 +++++++++++++++++++---------- ext/node/resolver.rs | 686 ---------- runtime/build.rs | 3 +- runtime/examples/hello_runtime.rs | 1 + runtime/lib.rs | 1 - runtime/web_worker.rs | 5 +- runtime/worker.rs | 6 +- 24 files changed, 1631 insertions(+), 1687 deletions(-) delete mode 100644 ext/node/resolver.rs diff --git a/cli/build.rs b/cli/build.rs index 9be441bcc4..7a3252e20b 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -2,6 +2,7 @@ use std::env; use std::path::PathBuf; +use std::sync::Arc; use deno_core::snapshot_util::*; use deno_core::Extension; @@ -361,7 +362,10 @@ fn create_cli_snapshot(snapshot_path: PathBuf) { deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Default::default()), deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(false, StdFs), - deno_node::deno_node::init_ops::(None), + deno_node::deno_node::init_ops::( + None, + Some(Arc::new(deno_node::RealFs)), + ), cli::init_ops_and_esm(), // NOTE: This needs to be init_ops_and_esm! 
]; diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index fd40bb95f2..31aa3ae8ef 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -17,7 +17,6 @@ use crate::file_fetcher::get_source_from_bytes; use crate::file_fetcher::map_content_type; use crate::file_fetcher::SUPPORTED_SCHEMES; use crate::lsp::logging::lsp_warn; -use crate::node::CliNodeResolver; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; use crate::npm::PackageJsonDepsInstaller; @@ -39,8 +38,8 @@ use deno_graph::Resolution; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; -use deno_runtime::deno_node::RealFs; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReq; use deno_semver::npm::NpmPackageReqReference; @@ -1057,7 +1056,7 @@ impl Documents { &self, specifiers: Vec, referrer_doc: &AssetOrDocument, - maybe_node_resolver: Option<&Arc>, + maybe_node_resolver: Option<&Arc>, ) -> Vec> { let referrer = referrer_doc.specifier(); let dependencies = match referrer_doc { @@ -1071,7 +1070,7 @@ impl Documents { // we're in an npm package, so use node resolution results.push(Some(NodeResolution::into_specifier_and_media_type( node_resolver - .resolve::( + .resolve( &specifier, referrer, NodeResolutionMode::Types, @@ -1419,7 +1418,7 @@ impl Documents { fn resolve_dependency( &self, specifier: &ModuleSpecifier, - maybe_node_resolver: Option<&Arc>, + maybe_node_resolver: Option<&Arc>, ) -> Option<(ModuleSpecifier, MediaType)> { if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) { return node_resolve_npm_req_ref(npm_ref, maybe_node_resolver); @@ -1454,12 +1453,12 @@ impl Documents { fn node_resolve_npm_req_ref( npm_req_ref: NpmPackageReqReference, - maybe_node_resolver: Option<&Arc>, + maybe_node_resolver: Option<&Arc>, ) -> Option<(ModuleSpecifier, MediaType)> { 
maybe_node_resolver.map(|node_resolver| { NodeResolution::into_specifier_and_media_type( node_resolver - .resolve_npm_req_reference::( + .resolve_npm_req_reference( &npm_req_ref, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index f1b9cb4347..e7968a6655 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -9,6 +9,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::ModuleSpecifier; +use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_web::BlobStore; @@ -79,7 +80,6 @@ use crate::file_fetcher::FileFetcher; use crate::graph_util; use crate::http_util::HttpClient; use crate::lsp::urls::LspUrlKind; -use crate::node::CliNodeResolver; use crate::npm::create_npm_fs_resolver; use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; @@ -103,7 +103,7 @@ pub struct StateSnapshot { pub cache_metadata: cache::CacheMetadata, pub documents: Documents, pub maybe_import_map: Option>, - pub maybe_node_resolver: Option>, + pub maybe_node_resolver: Option>, pub maybe_npm_resolver: Option>, } @@ -449,6 +449,7 @@ fn create_lsp_structs( let resolution = Arc::new(NpmResolution::from_serialized(api.clone(), None, None)); let fs_resolver = create_npm_fs_resolver( + Arc::new(deno_node::RealFs), npm_cache.clone(), &progress_bar, registry_url.clone(), @@ -700,9 +701,11 @@ impl Inner { self.npm_resolution.snapshot(), None, )); + let node_fs = Arc::new(deno_node::RealFs); let npm_resolver = Arc::new(CliNpmResolver::new( npm_resolution.clone(), create_npm_fs_resolver( + node_fs.clone(), self.npm_cache.clone(), &ProgressBar::new(ProgressBarStyle::TextOnly), self.npm_api.base_url().clone(), @@ -711,7 +714,8 @@ impl Inner { ), None, )); - let node_resolver = Arc::new(NodeResolver::new(npm_resolver.clone())); + let node_resolver = + 
Arc::new(NodeResolver::new(node_fs, npm_resolver.clone())); Arc::new(StateSnapshot { assets: self.assets.snapshot(), cache_metadata: self.cache_metadata.clone(), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index c4ef0ed7e9..5a7743ef26 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -12,7 +12,6 @@ use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::node; use crate::node::CliNodeCodeTranslator; -use crate::node::CliNodeResolver; use crate::proc_state::CjsResolutionStore; use crate::proc_state::FileWatcherReporter; use crate::proc_state::ProcState; @@ -51,7 +50,7 @@ use deno_lockfile::Lockfile; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; -use deno_runtime::deno_node::RealFs; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use std::borrow::Cow; @@ -244,7 +243,7 @@ pub struct CliModuleLoader { graph_container: Arc, module_load_preparer: Arc, node_code_translator: Arc, - node_resolver: Arc, + node_resolver: Arc, parsed_source_cache: Arc, resolver: Arc, } @@ -387,7 +386,7 @@ impl CliModuleLoader { self.root_permissions.clone() }; // translate cjs to esm if it's cjs and inject node globals - self.node_code_translator.translate_cjs_to_esm::( + self.node_code_translator.translate_cjs_to_esm( specifier, &code, &mut permissions, @@ -466,7 +465,7 @@ impl ModuleLoader for CliModuleLoader { if self.node_resolver.in_npm_package(referrer) { // we're in an npm package, so use node resolution return self - .handle_node_resolve_result(self.node_resolver.resolve::( + .handle_node_resolve_result(self.node_resolver.resolve( specifier, referrer, NodeResolutionMode::Execution, @@ -492,7 +491,7 @@ impl ModuleLoader for CliModuleLoader { return match graph.get(specifier) { Some(Module::Npm(module)) => self .handle_node_resolve_result( - 
self.node_resolver.resolve_npm_reference::( + self.node_resolver.resolve_npm_reference( &module.nv_reference, NodeResolutionMode::Execution, &mut permissions, @@ -554,7 +553,7 @@ impl ModuleLoader for CliModuleLoader { { return self .handle_node_resolve_result( - self.node_resolver.resolve_npm_req_reference::( + self.node_resolver.resolve_npm_req_reference( &reference, NodeResolutionMode::Execution, &mut permissions, diff --git a/cli/node.rs b/cli/node.rs index 3ec9500e88..8b54d0d422 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -1,7 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::collections::HashSet; -use std::sync::Arc; use deno_ast::swc::common::SyntaxContext; use deno_ast::view::Node; @@ -15,15 +14,11 @@ use deno_core::error::AnyError; use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use deno_runtime::deno_node::analyze::CjsEsmCodeAnalyzer; use deno_runtime::deno_node::analyze::NodeCodeTranslator; -use deno_runtime::deno_node::NodeResolver; use crate::cache::NodeAnalysisCache; -use crate::npm::CliNpmResolver; use crate::util::fs::canonicalize_path_maybe_not_exists; -pub type CliNodeCodeTranslator = - NodeCodeTranslator>; -pub type CliNodeResolver = NodeResolver>; +pub type CliNodeCodeTranslator = NodeCodeTranslator; /// Resolves a specifier that is pointing into a node_modules folder. /// diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index a44afc9359..e2919f6ee8 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -23,6 +23,7 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_runtime::deno_core::futures; +use deno_runtime::deno_node::NodeFs; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::PackageJson; @@ -43,6 +44,7 @@ use super::common::NpmPackageFsResolver; /// and resolves packages from it. 
#[derive(Debug)] pub struct LocalNpmPackageResolver { + fs: Arc, cache: Arc, progress_bar: ProgressBar, resolution: Arc, @@ -53,6 +55,7 @@ pub struct LocalNpmPackageResolver { impl LocalNpmPackageResolver { pub fn new( + fs: Arc, cache: Arc, progress_bar: ProgressBar, registry_url: Url, @@ -60,6 +63,7 @@ impl LocalNpmPackageResolver { resolution: Arc, ) -> Self { Self { + fs, cache, progress_bar, resolution, @@ -149,9 +153,10 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { if sub_dir.is_dir() { // if doing types resolution, only resolve the package if it specifies a types property if mode.is_types() && !name.starts_with("@types/") { - let package_json = PackageJson::load_skip_read_permission::< - deno_runtime::deno_node::RealFs, - >(sub_dir.join("package.json"))?; + let package_json = PackageJson::load_skip_read_permission( + &*self.fs, + sub_dir.join("package.json"), + )?; if package_json.types.is_some() { return Ok(sub_dir); } diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 8b871beaf7..fa83cdf59c 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -18,6 +18,7 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::resolution::PackageReqNotFoundError; use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; +use deno_runtime::deno_node; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NpmResolver; @@ -269,6 +270,7 @@ impl NpmResolver for CliNpmResolver { } pub fn create_npm_fs_resolver( + fs: Arc, cache: Arc, progress_bar: &ProgressBar, registry_url: Url, @@ -277,6 +279,7 @@ pub fn create_npm_fs_resolver( ) -> Arc { match maybe_node_modules_path { Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new( + fs, cache, progress_bar.clone(), registry_url, diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 6b7e9b1f28..b6529d3a07 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ 
-19,7 +19,6 @@ use crate::http_util::HttpClient; use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsEsmCodeAnalyzer; use crate::node::CliNodeCodeTranslator; -use crate::node::CliNodeResolver; use crate::npm::create_npm_fs_resolver; use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; @@ -38,6 +37,7 @@ use deno_core::ModuleSpecifier; use deno_core::SharedArrayBufferStore; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; +use deno_runtime::deno_node; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -79,7 +79,8 @@ pub struct Inner { pub module_graph_builder: Arc, pub module_load_preparer: Arc, pub node_code_translator: Arc, - pub node_resolver: Arc, + pub node_fs: Arc, + pub node_resolver: Arc, pub npm_api: Arc, pub npm_cache: Arc, pub npm_resolver: Arc, @@ -150,6 +151,7 @@ impl ProcState { module_graph_builder: self.module_graph_builder.clone(), module_load_preparer: self.module_load_preparer.clone(), node_code_translator: self.node_code_translator.clone(), + node_fs: self.node_fs.clone(), node_resolver: self.node_resolver.clone(), npm_api: self.npm_api.clone(), npm_cache: self.npm_cache.clone(), @@ -245,7 +247,9 @@ impl ProcState { npm_snapshot, lockfile.as_ref().cloned(), )); + let node_fs = Arc::new(deno_node::RealFs); let npm_fs_resolver = create_npm_fs_resolver( + node_fs.clone(), npm_cache, &progress_bar, npm_registry_url, @@ -308,11 +312,14 @@ impl ProcState { let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db(&dir)); let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); + let node_resolver = + Arc::new(NodeResolver::new(node_fs.clone(), npm_resolver.clone())); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, + node_fs.clone(), + node_resolver.clone(), npm_resolver.clone(), )); - let node_resolver = 
Arc::new(NodeResolver::new(npm_resolver.clone())); let type_checker = Arc::new(TypeChecker::new( dir.clone(), caches.clone(), @@ -365,6 +372,7 @@ impl ProcState { maybe_file_watcher_reporter, module_graph_builder, node_code_translator, + node_fs, node_resolver, npm_api, npm_cache, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index a2872e9b92..669ad1d813 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -190,6 +190,7 @@ fn create_web_worker_callback( root_cert_store: Some(ps.root_cert_store.clone()), seed: ps.options.seed(), module_loader, + node_fs: Some(ps.node_fs.clone()), npm_resolver: None, // not currently supported create_web_worker_cb, preload_module_cb: web_worker_cb.clone(), @@ -285,6 +286,7 @@ pub async fn run( should_break_on_first_statement: false, should_wait_for_inspector_session: false, module_loader, + node_fs: Some(ps.node_fs.clone()), npm_resolver: None, // not currently supported get_error_class_fn: Some(&get_error_class_name), cache_storage_dir: None, diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 36bc25d6a8..4fb6800fa0 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -9,6 +9,7 @@ use deno_core::error::AnyError; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_runtime::colors; +use deno_runtime::deno_node::NodeResolver; use once_cell::sync::Lazy; use regex::Regex; @@ -21,7 +22,6 @@ use crate::cache::Caches; use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; -use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::tsc; use crate::version; @@ -42,7 +42,7 @@ pub struct TypeChecker { deno_dir: DenoDir, caches: Arc, cli_options: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, } @@ -51,7 +51,7 @@ impl TypeChecker { deno_dir: DenoDir, caches: Arc, cli_options: Arc, - node_resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { diff --git a/cli/tools/task.rs b/cli/tools/task.rs 
index 898cdd8d90..5d34d39c75 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -4,7 +4,6 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; -use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path; @@ -13,7 +12,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::future::LocalBoxFuture; -use deno_runtime::deno_node::RealFs; +use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageNv; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; @@ -236,13 +235,12 @@ impl ShellCommand for NpmPackageBinCommand { fn resolve_npm_commands( npm_resolver: &CliNpmResolver, - node_resolver: &CliNodeResolver, + node_resolver: &NodeResolver, ) -> Result>, AnyError> { let mut result = HashMap::new(); let snapshot = npm_resolver.snapshot(); for id in snapshot.top_level_packages() { - let bin_commands = - node_resolver.resolve_binary_commands::(&id.nv)?; + let bin_commands = node_resolver.resolve_binary_commands(&id.nv)?; for bin_command in bin_commands { result.insert( bin_command.to_string(), diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 0d956b661f..aa589a1ca9 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -4,7 +4,6 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; use crate::node; -use crate::node::CliNodeResolver; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; @@ -35,7 +34,7 @@ use deno_graph::ResolutionResolved; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; -use deno_runtime::deno_node::RealFs; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use lsp_types::Url; @@ -307,7 +306,7 @@ pub 
struct Request { pub debug: bool, pub graph: Arc, pub hash_data: u64, - pub maybe_node_resolver: Option>, + pub maybe_node_resolver: Option>, pub maybe_tsbuildinfo: Option, /// A vector of strings that represent the root/entry point modules for the /// program. @@ -331,7 +330,7 @@ struct State { graph: Arc, maybe_tsbuildinfo: Option, maybe_response: Option, - maybe_node_resolver: Option>, + maybe_node_resolver: Option>, remapped_specifiers: HashMap, root_map: HashMap, current_dir: PathBuf, @@ -341,7 +340,7 @@ impl State { pub fn new( graph: Arc, hash_data: u64, - maybe_node_resolver: Option>, + maybe_node_resolver: Option>, maybe_tsbuildinfo: Option, root_map: HashMap, remapped_specifiers: HashMap, @@ -637,7 +636,7 @@ fn resolve_graph_specifier_types( } Some(Module::Npm(module)) => { if let Some(node_resolver) = &state.maybe_node_resolver { - let maybe_resolution = node_resolver.resolve_npm_reference::( + let maybe_resolution = node_resolver.resolve_npm_reference( &module.nv_reference, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), @@ -655,9 +654,7 @@ fn resolve_graph_specifier_types( let specifier = node::resolve_specifier_into_node_modules(&module.specifier); NodeResolution::into_specifier_and_media_type( - node_resolver - .url_to_node_resolution::(specifier) - .ok(), + node_resolver.url_to_node_resolution(specifier).ok(), ) })) } @@ -678,7 +675,7 @@ fn resolve_non_graph_specifier_types( // we're in an npm package, so use node resolution Ok(Some(NodeResolution::into_specifier_and_media_type( node_resolver - .resolve::( + .resolve( specifier, referrer, NodeResolutionMode::Types, @@ -692,7 +689,7 @@ fn resolve_non_graph_specifier_types( // we don't need this special code here. 
// This could occur when resolving npm:@types/node when it is // injected and not part of the graph - let maybe_resolution = node_resolver.resolve_npm_req_reference::( + let maybe_resolution = node_resolver.resolve_npm_req_reference( &npm_ref, NodeResolutionMode::Types, &mut PermissionsContainer::allow_all(), diff --git a/cli/worker.rs b/cli/worker.rs index c73e4edbed..e565789ede 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -1,7 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::path::PathBuf; -use std::rc::Rc; use std::sync::Arc; use deno_ast::ModuleSpecifier; @@ -14,7 +13,6 @@ use deno_core::ModuleId; use deno_runtime::colors; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; -use deno_runtime::deno_node::RealFs; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; @@ -259,15 +257,13 @@ pub async fn create_custom_worker( ps.npm_resolver .add_package_reqs(vec![package_ref.req.clone()]) .await?; - let node_resolution = ps - .node_resolver - .resolve_binary_export::(&package_ref)?; + let node_resolution = + ps.node_resolver.resolve_binary_export(&package_ref)?; let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); (node_resolution.into_url(), is_main_cjs) } else if ps.options.is_npm_main() { - let node_resolution = ps - .node_resolver - .url_to_node_resolution::(main_module)?; + let node_resolution = + ps.node_resolver.url_to_node_resolution(main_module)?; let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); (node_resolution.into_url(), is_main_cjs) } else { @@ -345,7 +341,8 @@ pub async fn create_custom_worker( should_break_on_first_statement: ps.options.inspect_brk().is_some(), should_wait_for_inspector_session: ps.options.inspect_wait().is_some(), module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.clone())), + node_fs: Some(ps.node_fs.clone()), + 
npm_resolver: Some(ps.npm_resolver.clone()), get_error_class_fn: Some(&errors::get_error_class_name), cache_storage_dir, origin_storage_dir, @@ -468,7 +465,8 @@ fn create_web_worker_callback( format_js_error_fn: Some(Arc::new(format_js_error)), source_map_getter: Some(Box::new(module_loader.clone())), module_loader, - npm_resolver: Some(Rc::new(ps.npm_resolver.clone())), + node_fs: Some(ps.node_fs.clone()), + npm_resolver: Some(ps.npm_resolver.clone()), worker_type: args.worker_type, maybe_inspector_server, get_error_class_fn: Some(&errors::get_error_class_name), @@ -492,6 +490,8 @@ fn create_web_worker_callback( #[cfg(test)] mod tests { + use std::rc::Rc; + use super::*; use deno_core::resolve_path; use deno_core::FsModuleLoader; @@ -520,6 +520,7 @@ mod tests { should_break_on_first_statement: false, should_wait_for_inspector_session: false, module_loader: Rc::new(FsModuleLoader), + node_fs: Some(Arc::new(deno_node::RealFs)), npm_resolver: None, get_error_class_fn: None, cache_storage_dir: None, diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index a206f4425a..f1af2f6110 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -5,6 +5,7 @@ use std::collections::VecDeque; use std::fmt::Write; use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; use deno_core::anyhow::Context; use deno_core::ModuleSpecifier; @@ -12,11 +13,11 @@ use once_cell::sync::Lazy; use deno_core::error::AnyError; -use crate::package_exports_resolve; use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; +use crate::NodeResolver; use crate::NpmResolver; use crate::PackageJson; use crate::PathClean; @@ -64,23 +65,26 @@ pub trait CjsEsmCodeAnalyzer { ) -> Result, AnyError>; } -pub struct NodeCodeTranslator< - TCjsEsmCodeAnalyzer: CjsEsmCodeAnalyzer, - TNpmResolver: NpmResolver, -> { +pub struct NodeCodeTranslator { cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - npm_resolver: TNpmResolver, + fs: Arc, + node_resolver: Arc, + 
npm_resolver: Arc, } -impl - NodeCodeTranslator +impl + NodeCodeTranslator { pub fn new( cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - npm_resolver: TNpmResolver, + fs: Arc, + node_resolver: Arc, + npm_resolver: Arc, ) -> Self { Self { cjs_esm_code_analyzer, + fs, + node_resolver, npm_resolver, } } @@ -105,7 +109,7 @@ impl /// For all discovered reexports the analysis will be performed recursively. /// /// If successful a source code for equivalent ES module is returned. - pub fn translate_cjs_to_esm( + pub fn translate_cjs_to_esm( &self, specifier: &ModuleSpecifier, source: &str, @@ -142,7 +146,7 @@ impl handled_reexports.insert(reexport.to_string()); // First, resolve relate reexport specifier - let resolved_reexport = self.resolve::( + let resolved_reexport = self.resolve( &reexport, &referrer, // FIXME(bartlomieju): check if these conditions are okay, probably @@ -154,7 +158,9 @@ impl // Second, read the source code from disk let reexport_specifier = ModuleSpecifier::from_file_path(&resolved_reexport).unwrap(); - let reexport_file_text = Fs::read_to_string(&resolved_reexport) + let reexport_file_text = self + .fs + .read_to_string(&resolved_reexport) .with_context(|| { format!( "Could not find '{}' ({}) referenced from {}", @@ -208,7 +214,7 @@ impl Ok(translated_source) } - fn resolve( + fn resolve( &self, specifier: &str, referrer: &ModuleSpecifier, @@ -223,10 +229,8 @@ impl let referrer_path = referrer.to_file_path().unwrap(); if specifier.starts_with("./") || specifier.starts_with("../") { if let Some(parent) = referrer_path.parent() { - return file_extension_probe::( - parent.join(specifier), - &referrer_path, - ); + return self + .file_extension_probe(parent.join(specifier), &referrer_path); } else { todo!(); } @@ -245,15 +249,16 @@ impl )?; let package_json_path = module_dir.join("package.json"); - if Fs::exists(&package_json_path) { - let package_json = PackageJson::load::( - &self.npm_resolver, + if self.fs.exists(&package_json_path) { + let 
package_json = PackageJson::load( + &*self.fs, + &*self.npm_resolver, permissions, package_json_path.clone(), )?; if let Some(exports) = &package_json.exports { - return package_exports_resolve::( + return self.node_resolver.package_exports_resolve( &package_json_path, package_subpath, exports, @@ -261,7 +266,6 @@ impl NodeModuleKind::Esm, conditions, mode, - &self.npm_resolver, permissions, ); } @@ -269,12 +273,13 @@ impl // old school if package_subpath != "." { let d = module_dir.join(package_subpath); - if Fs::is_dir(&d) { + if self.fs.is_dir(&d) { // subdir might have a package.json that specifies the entrypoint let package_json_path = d.join("package.json"); - if Fs::exists(&package_json_path) { - let package_json = PackageJson::load::( - &self.npm_resolver, + if self.fs.exists(&package_json_path) { + let package_json = PackageJson::load( + &*self.fs, + &*self.npm_resolver, permissions, package_json_path, )?; @@ -285,7 +290,7 @@ impl return Ok(d.join("index.js").clean()); } - return file_extension_probe::(d, &referrer_path); + return self.file_extension_probe(d, &referrer_path); } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) { return Ok(module_dir.join(main).clean()); } else { @@ -294,6 +299,33 @@ impl } Err(not_found(specifier, &referrer_path)) } + + fn file_extension_probe( + &self, + p: PathBuf, + referrer: &Path, + ) -> Result { + let p = p.clean(); + if self.fs.exists(&p) { + let file_name = p.file_name().unwrap(); + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.fs.is_file(&p_js) { + return Ok(p_js); + } else if self.fs.is_dir(&p) { + return Ok(p.join("index.js")); + } else { + return Ok(p); + } + } else if let Some(file_name) = p.file_name() { + let p_js = + p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); + if self.fs.is_file(&p_js) { + return Ok(p_js); + } + } + Err(not_found(&p.to_string_lossy(), referrer)) + } } fn esm_code_from_top_level_decls( @@ -455,30 +487,6 @@ fn 
parse_specifier(specifier: &str) -> Option<(String, String)> { Some((package_name, package_subpath)) } -fn file_extension_probe( - p: PathBuf, - referrer: &Path, -) -> Result { - let p = p.clean(); - if Fs::exists(&p) { - let file_name = p.file_name().unwrap(); - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if Fs::is_file(&p_js) { - return Ok(p_js); - } else if Fs::is_dir(&p) { - return Ok(p.join("index.js")); - } else { - return Ok(p); - } - } else if let Some(file_name) = p.file_name() { - let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap())); - if Fs::is_file(&p_js) { - return Ok(p_js); - } - } - Err(not_found(&p.to_string_lossy(), referrer)) -} - fn not_found(path: &str, referrer: &Path) -> AnyError { let msg = format!( "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"", diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 2b2ced89ce..e63c73537f 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -27,7 +27,6 @@ mod package_json; mod path; mod polyfill; mod resolution; -mod resolver; pub use package_json::PackageJson; pub use path::PathClean; @@ -35,22 +34,13 @@ pub use polyfill::is_builtin_node_module; pub use polyfill::resolve_builtin_node_module; pub use polyfill::NodeModulePolyfill; pub use polyfill::SUPPORTED_BUILTIN_NODE_MODULES; -pub use resolution::get_closest_package_json; -pub use resolution::get_package_scope_config; -pub use resolution::legacy_main_resolve; -pub use resolution::package_exports_resolve; -pub use resolution::package_imports_resolve; -pub use resolution::package_resolve; -pub use resolution::path_to_declaration_path; pub use resolution::NodeModuleKind; +pub use resolution::NodeResolution; pub use resolution::NodeResolutionMode; -pub use resolution::DEFAULT_CONDITIONS; -pub use resolver::NodeResolution; -pub use resolver::NodeResolver; +pub use resolution::NodeResolver; pub trait NodeEnv { type P: NodePermissions; - type Fs: NodeFs; } pub trait NodePermissions { 
@@ -71,24 +61,26 @@ pub struct NodeFsMetadata { pub is_dir: bool, } -pub trait NodeFs { - fn current_dir() -> io::Result; - fn metadata>(path: P) -> io::Result; - fn is_file>(path: P) -> bool; - fn is_dir>(path: P) -> bool; - fn exists>(path: P) -> bool; - fn read_to_string>(path: P) -> io::Result; - fn canonicalize>(path: P) -> io::Result; +pub trait NodeFs: std::fmt::Debug + Send + Sync { + fn current_dir(&self) -> io::Result; + fn metadata(&self, path: &Path) -> io::Result; + fn is_file(&self, path: &Path) -> bool; + fn is_dir(&self, path: &Path) -> bool; + fn exists(&self, path: &Path) -> bool; + fn read_to_string(&self, path: &Path) -> io::Result; + fn canonicalize(&self, path: &Path) -> io::Result; } +#[derive(Debug)] pub struct RealFs; + impl NodeFs for RealFs { - fn current_dir() -> io::Result { + fn current_dir(&self) -> io::Result { #[allow(clippy::disallowed_methods)] std::env::current_dir() } - fn metadata>(path: P) -> io::Result { + fn metadata(&self, path: &Path) -> io::Result { #[allow(clippy::disallowed_methods)] std::fs::metadata(path).map(|metadata| { // on most systems, calling is_file() and is_dir() is cheap @@ -100,35 +92,35 @@ impl NodeFs for RealFs { }) } - fn exists>(path: P) -> bool { + fn exists(&self, path: &Path) -> bool { #[allow(clippy::disallowed_methods)] std::fs::metadata(path).is_ok() } - fn is_file>(path: P) -> bool { + fn is_file(&self, path: &Path) -> bool { #[allow(clippy::disallowed_methods)] std::fs::metadata(path) .map(|m| m.is_file()) .unwrap_or(false) } - fn is_dir>(path: P) -> bool { + fn is_dir(&self, path: &Path) -> bool { #[allow(clippy::disallowed_methods)] std::fs::metadata(path).map(|m| m.is_dir()).unwrap_or(false) } - fn read_to_string>(path: P) -> io::Result { + fn read_to_string(&self, path: &Path) -> io::Result { #[allow(clippy::disallowed_methods)] std::fs::read_to_string(path) } - fn canonicalize>(path: P) -> io::Result { + fn canonicalize(&self, path: &Path) -> io::Result { 
#[allow(clippy::disallowed_methods)] - std::path::Path::canonicalize(path.as_ref()) + std::path::Path::canonicalize(path) } } -pub trait NpmResolver { +pub trait NpmResolver: std::fmt::Debug + Send + Sync { /// Resolves an npm package folder path from an npm package referrer. fn resolve_package_folder_from_package( &self, @@ -177,57 +169,6 @@ pub trait NpmResolver { ) -> Result<(), AnyError>; } -impl NpmResolver for Arc { - fn resolve_package_folder_from_package( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - ) -> Result { - (**self).resolve_package_folder_from_package(specifier, referrer, mode) - } - - fn resolve_package_folder_from_path( - &self, - path: &Path, - ) -> Result { - (**self).resolve_package_folder_from_path(path) - } - - fn resolve_package_folder_from_deno_module( - &self, - pkg_nv: &NpmPackageNv, - ) -> Result { - (**self).resolve_package_folder_from_deno_module(pkg_nv) - } - - fn resolve_pkg_id_from_pkg_req( - &self, - req: &NpmPackageReq, - ) -> Result { - (**self).resolve_pkg_id_from_pkg_req(req) - } - - fn resolve_nv_ref_from_pkg_req_ref( - &self, - req_ref: &NpmPackageReqReference, - ) -> Result { - (**self).resolve_nv_ref_from_pkg_req_ref(req_ref) - } - - fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - (**self).in_npm_package(specifier) - } - - fn ensure_read_permission( - &self, - permissions: &mut dyn NodePermissions, - path: &Path, - ) -> Result<(), AnyError> { - (**self).ensure_read_permission(permissions, path) - } -} - pub static NODE_GLOBAL_THIS_NAME: Lazy = Lazy::new(|| { let now = std::time::SystemTime::now(); let seconds = now @@ -582,11 +523,18 @@ deno_core::extension!(deno_node, "zlib.ts", ], options = { - maybe_npm_resolver: Option>, + maybe_npm_resolver: Option>, + fs: Option>, }, state = |state, options| { + let fs = options.fs.unwrap_or_else(|| Arc::new(RealFs)); + state.put(fs.clone()); if let Some(npm_resolver) = options.maybe_npm_resolver { - state.put(npm_resolver); 
+ state.put(npm_resolver.clone()); + state.put(Rc::new(NodeResolver::new( + fs, + npm_resolver, + ))) } }, ); diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 82a0433400..513b3f5899 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -13,6 +13,7 @@ use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; use crate::resolution; use crate::NodeEnv; @@ -20,6 +21,7 @@ use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; +use crate::NodeResolver; use crate::NpmResolver; use crate::PackageJson; @@ -31,7 +33,7 @@ where P: NodePermissions + 'static, { let resolver = { - let resolver = state.borrow::>(); + let resolver = state.borrow::>(); resolver.clone() }; let permissions = state.borrow_mut::

(); @@ -96,10 +98,11 @@ pub fn op_require_node_module_paths( where Env: NodeEnv + 'static, { + let fs = state.borrow::>().clone(); // Guarantee that "from" is absolute. let from = deno_core::resolve_path( &from, - &(Env::Fs::current_dir()).context("Unable to get CWD")?, + &(fs.current_dir()).context("Unable to get CWD")?, ) .unwrap() .to_file_path() @@ -191,7 +194,7 @@ fn op_require_resolve_deno_dir( request: String, parent_filename: String, ) -> Option { - let resolver = state.borrow::>(); + let resolver = state.borrow::>(); resolver .resolve_package_folder_from_package( &request, @@ -204,7 +207,7 @@ fn op_require_resolve_deno_dir( #[op] fn op_require_is_deno_dir_package(state: &mut OpState, path: String) -> bool { - let resolver = state.borrow::>(); + let resolver = state.borrow::>(); resolver.in_npm_package_at_path(&PathBuf::from(path)) } @@ -264,7 +267,8 @@ where { let path = PathBuf::from(path); ensure_read_permission::(state, &path)?; - if let Ok(metadata) = Env::Fs::metadata(&path) { + let fs = state.borrow::>().clone(); + if let Ok(metadata) = fs.metadata(&path) { if metadata.is_file { return Ok(0); } else { @@ -285,7 +289,8 @@ where { let path = PathBuf::from(request); ensure_read_permission::(state, &path)?; - let mut canonicalized_path = Env::Fs::canonicalize(&path)?; + let fs = state.borrow::>().clone(); + let mut canonicalized_path = fs.canonicalize(&path)?; if cfg!(windows) { canonicalized_path = PathBuf::from( canonicalized_path @@ -353,7 +358,8 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - if let Ok(cwd) = Env::Fs::current_dir() { + let fs = state.borrow::>().clone(); + if let Ok(cwd) = fs.current_dir() { ensure_read_permission::(state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); } @@ -375,14 +381,14 @@ where return Ok(None); } - let resolver = state.borrow::>().clone(); + let node_resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); - let pkg = 
resolution::get_package_scope_config::( - &Url::from_file_path(parent_path.unwrap()).unwrap(), - &*resolver, - permissions, - ) - .ok(); + let pkg = node_resolver + .get_package_scope_config( + &Url::from_file_path(parent_path.unwrap()).unwrap(), + permissions, + ) + .ok(); if pkg.is_none() { return Ok(None); } @@ -408,18 +414,18 @@ where let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap(); if let Some(exports) = &pkg.exports { - resolution::package_exports_resolve::( - &pkg.path, - expansion, - exports, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(r.to_string_lossy().to_string())) + node_resolver + .package_exports_resolve( + &pkg.path, + expansion, + exports, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(r.to_string_lossy().to_string())) } else { Ok(None) } @@ -435,7 +441,8 @@ where { let file_path = PathBuf::from(file_path); ensure_read_permission::(state, &file_path)?; - Ok(Env::Fs::read_to_string(file_path)?) + let fs = state.borrow::>().clone(); + Ok(fs.read_to_string(&file_path)?) 
} #[op] @@ -462,10 +469,12 @@ fn op_require_resolve_exports( where Env: NodeEnv + 'static, { - let resolver = state.borrow::>().clone(); + let fs = state.borrow::>().clone(); + let npm_resolver = state.borrow::>().clone(); + let node_resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); - let pkg_path = if resolver + let pkg_path = if npm_resolver .in_npm_package_at_path(&PathBuf::from(&modules_path)) && !uses_local_node_modules_dir { @@ -473,32 +482,31 @@ where } else { let orignal = modules_path.clone(); let mod_dir = path_resolve(vec![modules_path, name]); - if Env::Fs::is_dir(&mod_dir) { + if fs.is_dir(Path::new(&mod_dir)) { mod_dir } else { orignal } }; - let pkg = PackageJson::load::( - &*resolver, + let pkg = node_resolver.load_package_json( permissions, PathBuf::from(&pkg_path).join("package.json"), )?; if let Some(exports) = &pkg.exports { let referrer = Url::from_file_path(parent_path).unwrap(); - resolution::package_exports_resolve::( - &pkg.path, - format!(".{expansion}"), - exports, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(r.to_string_lossy().to_string())) + node_resolver + .package_exports_resolve( + &pkg.path, + format!(".{expansion}"), + exports, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(r.to_string_lossy().to_string())) } else { Ok(None) } @@ -516,11 +524,10 @@ where state, PathBuf::from(&filename).parent().unwrap(), )?; - let resolver = state.borrow::>().clone(); + let node_resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); - resolution::get_closest_package_json::( + node_resolver.get_closest_package_json( &Url::from_file_path(filename).unwrap(), - &*resolver, permissions, ) } @@ -533,10 +540,12 @@ fn op_require_read_package_scope( where Env: NodeEnv + 'static, { - let resolver = 
state.borrow::>().clone(); + let node_resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); let package_json_path = PathBuf::from(package_json_path); - PackageJson::load::(&*resolver, permissions, package_json_path).ok() + node_resolver + .load_package_json(permissions, package_json_path) + .ok() } #[op] @@ -550,29 +559,24 @@ where { let parent_path = PathBuf::from(&parent_filename); ensure_read_permission::(state, &parent_path)?; - let resolver = state.borrow::>().clone(); + let node_resolver = state.borrow::>().clone(); let permissions = state.borrow_mut::(); - let pkg = PackageJson::load::( - &*resolver, - permissions, - parent_path.join("package.json"), - )?; + let pkg = node_resolver + .load_package_json(permissions, parent_path.join("package.json"))?; if pkg.imports.is_some() { let referrer = deno_core::url::Url::from_file_path(&parent_filename).unwrap(); - let r = resolution::package_imports_resolve::( - &request, - &referrer, - NodeModuleKind::Cjs, - resolution::REQUIRE_CONDITIONS, - NodeResolutionMode::Execution, - &*resolver, - permissions, - ) - .map(|r| Some(Url::from_file_path(r).unwrap().to_string())); - state.put(resolver); - r + node_resolver + .package_imports_resolve( + &request, + &referrer, + NodeModuleKind::Cjs, + resolution::REQUIRE_CONDITIONS, + NodeResolutionMode::Execution, + permissions, + ) + .map(|r| Some(Url::from_file_path(r).unwrap().to_string())) } else { Ok(None) } diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs index 08f78681ae..0e34897e3a 100644 --- a/ext/node/package_json.rs +++ b/ext/node/package_json.rs @@ -62,16 +62,18 @@ impl PackageJson { } } - pub fn load( + pub fn load( + fs: &dyn NodeFs, resolver: &dyn NpmResolver, permissions: &mut dyn NodePermissions, path: PathBuf, ) -> Result { resolver.ensure_read_permission(permissions, &path)?; - Self::load_skip_read_permission::(path) + Self::load_skip_read_permission(fs, path) } - pub fn load_skip_read_permission( + pub fn 
load_skip_read_permission( + fs: &dyn NodeFs, path: PathBuf, ) -> Result { assert!(path.is_absolute()); @@ -80,7 +82,7 @@ impl PackageJson { return Ok(CACHE.with(|cache| cache.borrow()[&path].clone())); } - let source = match Fs::read_to_string(&path) { + let source = match fs.read_to_string(&path) { Ok(source) => source, Err(err) if err.kind() == ErrorKind::NotFound => { return Ok(PackageJson::empty(path)); diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index d324f4b4b9..e5db6b3ac7 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -2,21 +2,28 @@ use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; use deno_core::anyhow::bail; +use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::serde_json::Map; use deno_core::serde_json::Value; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_media_type::MediaType; +use deno_semver::npm::NpmPackageNv; +use deno_semver::npm::NpmPackageNvReference; +use deno_semver::npm::NpmPackageReqReference; use crate::errors; -use crate::package_json::PackageJson; -use crate::path::PathClean; +use crate::AllowAllNodePermissions; use crate::NodeFs; use crate::NodePermissions; use crate::NpmResolver; +use crate::PackageJson; +use crate::PathClean; pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"]; pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"]; @@ -39,53 +46,1260 @@ impl NodeResolutionMode { } } -/// Checks if the resolved file has a corresponding declaration file. 
-pub fn path_to_declaration_path( - path: PathBuf, - referrer_kind: NodeModuleKind, -) -> Option { - fn probe_extensions( - path: &Path, - referrer_kind: NodeModuleKind, - ) -> Option { - let specific_dts_path = match referrer_kind { - NodeModuleKind::Cjs => with_known_extension(path, "d.cts"), - NodeModuleKind::Esm => with_known_extension(path, "d.mts"), - }; - if Fs::exists(&specific_dts_path) { - return Some(specific_dts_path); - } - let dts_path = with_known_extension(path, "d.ts"); - if Fs::exists(&dts_path) { - Some(dts_path) - } else { - None +#[derive(Debug)] +pub enum NodeResolution { + Esm(ModuleSpecifier), + CommonJs(ModuleSpecifier), + BuiltIn(String), +} + +impl NodeResolution { + pub fn into_url(self) -> ModuleSpecifier { + match self { + Self::Esm(u) => u, + Self::CommonJs(u) => u, + Self::BuiltIn(specifier) => { + if specifier.starts_with("node:") { + ModuleSpecifier::parse(&specifier).unwrap() + } else { + ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap() + } + } } } - let lowercase_path = path.to_string_lossy().to_lowercase(); - if lowercase_path.ends_with(".d.ts") - || lowercase_path.ends_with(".d.cts") - || lowercase_path.ends_with(".d.ts") - { - return Some(path); + pub fn into_specifier_and_media_type( + resolution: Option, + ) -> (ModuleSpecifier, MediaType) { + match resolution { + Some(NodeResolution::CommonJs(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, + MediaType::Dts => MediaType::Dcts, + _ => media_type, + }, + ) + } + Some(NodeResolution::Esm(specifier)) => { + let media_type = MediaType::from_specifier(&specifier); + ( + specifier, + match media_type { + MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, + MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, + MediaType::Dts => MediaType::Dmts, + _ => media_type, + }, + ) + } 
+ Some(resolution) => (resolution.into_url(), MediaType::Dts), + None => ( + ModuleSpecifier::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } } - if let Some(path) = probe_extensions::(&path, referrer_kind) { - return Some(path); +} + +#[derive(Debug)] +pub struct NodeResolver { + fs: Arc, + npm_resolver: Arc, +} + +impl NodeResolver { + pub fn new(fs: Arc, npm_resolver: Arc) -> Self { + Self { fs, npm_resolver } } - if Fs::is_dir(&path) { - if let Some(path) = - probe_extensions::(&path.join("index"), referrer_kind) + + pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + self.npm_resolver.in_npm_package(specifier) + } + + /// This function is an implementation of `defaultResolve` in + /// `lib/internal/modules/esm/resolve.js` from Node. + pub fn resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + // Note: if we are here, then the referrer is an esm module + // TODO(bartlomieju): skipped "policy" part as we don't plan to support it + + if crate::is_builtin_node_module(specifier) { + return Ok(Some(NodeResolution::BuiltIn(specifier.to_string()))); + } + + if let Ok(url) = Url::parse(specifier) { + if url.scheme() == "data" { + return Ok(Some(NodeResolution::Esm(url))); + } + + let protocol = url.scheme(); + + if protocol == "node" { + let split_specifier = url.as_str().split(':'); + let specifier = split_specifier.skip(1).collect::(); + + if crate::is_builtin_node_module(&specifier) { + return Ok(Some(NodeResolution::BuiltIn(specifier))); + } + } + + if protocol != "file" && protocol != "data" { + return Err(errors::err_unsupported_esm_url_scheme(&url)); + } + + // todo(dsherret): this seems wrong + if referrer.scheme() == "data" { + let url = referrer.join(specifier).map_err(AnyError::from)?; + return Ok(Some(NodeResolution::Esm(url))); + } + } + + let url = self.module_resolve( + specifier, + referrer, 
+ DEFAULT_CONDITIONS, + mode, + permissions, + )?; + let url = match url { + Some(url) => url, + None => return Ok(None), + }; + let url = match mode { + NodeResolutionMode::Execution => url, + NodeResolutionMode::Types => { + let path = url.to_file_path().unwrap(); + // todo(16370): the module kind is not correct here. I think we need + // typescript to tell us if the referrer is esm or cjs + let path = + match self.path_to_declaration_path(path, NodeModuleKind::Esm) { + Some(path) => path, + None => return Ok(None), + }; + ModuleSpecifier::from_file_path(path).unwrap() + } + }; + + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(Some(resolve_response)) + } + + fn module_resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + // note: if we're here, the referrer is an esm module + let url = if should_be_treated_as_relative_or_absolute_path(specifier) { + let resolved_specifier = referrer.join(specifier)?; + if mode.is_types() { + let file_path = to_file_path(&resolved_specifier); + // todo(dsherret): the node module kind is not correct and we + // should use the value provided by typescript instead + let declaration_path = + self.path_to_declaration_path(file_path, NodeModuleKind::Esm); + declaration_path.map(|declaration_path| { + ModuleSpecifier::from_file_path(declaration_path).unwrap() + }) + } else { + Some(resolved_specifier) + } + } else if specifier.starts_with('#') { + Some( + self + .package_imports_resolve( + specifier, + referrer, + NodeModuleKind::Esm, + conditions, + mode, + permissions, + ) + .map(|p| ModuleSpecifier::from_file_path(p).unwrap())?, + ) + } else if let Ok(resolved) = Url::parse(specifier) { + Some(resolved) + } else { + self + .package_resolve( + specifier, + 
referrer, + NodeModuleKind::Esm, + conditions, + mode, + permissions, + )? + .map(|p| ModuleSpecifier::from_file_path(p).unwrap()) + }; + Ok(match url { + Some(url) => Some(self.finalize_resolution(url, referrer)?), + None => None, + }) + } + + fn finalize_resolution( + &self, + resolved: ModuleSpecifier, + base: &ModuleSpecifier, + ) -> Result { + let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C"); + + if encoded_sep_re.is_match(resolved.path()) { + return Err(errors::err_invalid_module_specifier( + resolved.path(), + "must not include encoded \"/\" or \"\\\\\" characters", + Some(to_file_path_string(base)), + )); + } + + let path = to_file_path(&resolved); + + // TODO(bartlomieju): currently not supported + // if (getOptionValue('--experimental-specifier-resolution') === 'node') { + // ... + // } + + let p_str = path.to_str().unwrap(); + let p = if p_str.ends_with('/') { + p_str[p_str.len() - 1..].to_string() + } else { + p_str.to_string() + }; + + let (is_dir, is_file) = if let Ok(stats) = self.fs.metadata(Path::new(&p)) { + (stats.is_dir, stats.is_file) + } else { + (false, false) + }; + if is_dir { + return Err(errors::err_unsupported_dir_import( + resolved.as_str(), + base.as_str(), + )); + } else if !is_file { + return Err(errors::err_module_not_found( + resolved.as_str(), + base.as_str(), + "module", + )); + } + + Ok(resolved) + } + + pub fn resolve_npm_req_reference( + &self, + reference: &NpmPackageReqReference, + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + let reference = self + .npm_resolver + .resolve_nv_ref_from_pkg_req_ref(reference)?; + self.resolve_npm_reference(&reference, mode, permissions) + } + + pub fn resolve_npm_reference( + &self, + reference: &NpmPackageNvReference, + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(&reference.nv)?; + let node_module_kind = 
NodeModuleKind::Esm; + let maybe_resolved_path = self + .package_config_resolve( + &reference + .sub_path + .as_ref() + .map(|s| format!("./{s}")) + .unwrap_or_else(|| ".".to_string()), + &package_folder, + node_module_kind, + DEFAULT_CONDITIONS, + mode, + permissions, + ) + .with_context(|| { + format!("Error resolving package config for '{reference}'") + })?; + let resolved_path = match maybe_resolved_path { + Some(resolved_path) => resolved_path, + None => return Ok(None), + }; + let resolved_path = match mode { + NodeResolutionMode::Execution => resolved_path, + NodeResolutionMode::Types => { + match self.path_to_declaration_path(resolved_path, node_module_kind) { + Some(path) => path, + None => return Ok(None), + } + } + }; + let url = ModuleSpecifier::from_file_path(resolved_path).unwrap(); + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(Some(resolve_response)) + } + + pub fn resolve_binary_commands( + &self, + pkg_nv: &NpmPackageNv, + ) -> Result, AnyError> { + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(pkg_nv)?; + let package_json_path = package_folder.join("package.json"); + let package_json = self + .load_package_json(&mut AllowAllNodePermissions, package_json_path)?; + + Ok(match package_json.bin { + Some(Value::String(_)) => vec![pkg_nv.name.to_string()], + Some(Value::Object(o)) => { + o.into_iter().map(|(key, _)| key).collect::>() + } + _ => Vec::new(), + }) + } + + pub fn resolve_binary_export( + &self, + pkg_ref: &NpmPackageReqReference, + ) -> Result { + let pkg_nv = self + .npm_resolver + .resolve_pkg_id_from_pkg_req(&pkg_ref.req)? 
+ .nv; + let bin_name = pkg_ref.sub_path.as_deref(); + let package_folder = self + .npm_resolver + .resolve_package_folder_from_deno_module(&pkg_nv)?; + let package_json_path = package_folder.join("package.json"); + let package_json = self + .load_package_json(&mut AllowAllNodePermissions, package_json_path)?; + let bin = match &package_json.bin { + Some(bin) => bin, + None => bail!( + "package '{}' did not have a bin property in its package.json", + &pkg_nv.name, + ), + }; + let bin_entry = resolve_bin_entry_value(&pkg_nv, bin_name, bin)?; + let url = + ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap(); + + let resolve_response = self.url_to_node_resolution(url)?; + // TODO(bartlomieju): skipped checking errors for commonJS resolution and + // "preserveSymlinksMain"/"preserveSymlinks" options. + Ok(resolve_response) + } + + pub fn url_to_node_resolution( + &self, + url: ModuleSpecifier, + ) -> Result { + let url_str = url.as_str().to_lowercase(); + if url_str.starts_with("http") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { + let package_config = + self.get_closest_package_json(&url, &mut AllowAllNodePermissions)?; + if package_config.typ == "module" { + Ok(NodeResolution::Esm(url)) + } else { + Ok(NodeResolution::CommonJs(url)) + } + } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { + Ok(NodeResolution::Esm(url)) + } else if url_str.ends_with(".ts") { + Err(generic_error(format!( + "TypeScript files are not supported in npm packages: {url}" + ))) + } else { + Ok(NodeResolution::CommonJs(url)) + } + } + + fn package_config_resolve( + &self, + package_subpath: &str, + package_dir: &Path, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + let package_json_path = package_dir.join("package.json"); + let referrer = 
ModuleSpecifier::from_directory_path(package_dir).unwrap(); + let package_config = + self.load_package_json(permissions, package_json_path.clone())?; + if let Some(exports) = &package_config.exports { + let result = self.package_exports_resolve( + &package_json_path, + package_subpath.to_string(), + exports, + &referrer, + referrer_kind, + conditions, + mode, + permissions, + ); + match result { + Ok(found) => return Ok(Some(found)), + Err(exports_err) => { + if mode.is_types() && package_subpath == "." { + if let Ok(Some(path)) = + self.legacy_main_resolve(&package_config, referrer_kind, mode) + { + return Ok(Some(path)); + } else { + return Ok(None); + } + } + return Err(exports_err); + } + } + } + if package_subpath == "." { + return self.legacy_main_resolve(&package_config, referrer_kind, mode); + } + + Ok(Some(package_dir.join(package_subpath))) + } + + /// Checks if the resolved file has a corresponding declaration file. + pub(super) fn path_to_declaration_path( + &self, + path: PathBuf, + referrer_kind: NodeModuleKind, + ) -> Option { + fn probe_extensions( + fs: &dyn NodeFs, + path: &Path, + referrer_kind: NodeModuleKind, + ) -> Option { + let specific_dts_path = match referrer_kind { + NodeModuleKind::Cjs => with_known_extension(path, "d.cts"), + NodeModuleKind::Esm => with_known_extension(path, "d.mts"), + }; + if fs.exists(&specific_dts_path) { + return Some(specific_dts_path); + } + let dts_path = with_known_extension(path, "d.ts"); + if fs.exists(&dts_path) { + Some(dts_path) + } else { + None + } + } + + let lowercase_path = path.to_string_lossy().to_lowercase(); + if lowercase_path.ends_with(".d.ts") + || lowercase_path.ends_with(".d.cts") + || lowercase_path.ends_with(".d.ts") { return Some(path); } + if let Some(path) = probe_extensions(&*self.fs, &path, referrer_kind) { + return Some(path); + } + if self.fs.is_dir(&path) { + if let Some(path) = + probe_extensions(&*self.fs, &path.join("index"), referrer_kind) + { + return Some(path); + } + } + 
None } - None + + pub(super) fn package_imports_resolve( + &self, + name: &str, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result { + if name == "#" || name.starts_with("#/") || name.ends_with('/') { + let reason = "is not a valid internal imports specifier name"; + return Err(errors::err_invalid_module_specifier( + name, + reason, + Some(to_specifier_display_string(referrer)), + )); + } + + let package_config = + self.get_package_scope_config(referrer, permissions)?; + let mut package_json_path = None; + if package_config.exists { + package_json_path = Some(package_config.path.clone()); + if let Some(imports) = &package_config.imports { + if imports.contains_key(name) && !name.contains('*') { + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + imports.get(name).unwrap().to_owned(), + "".to_string(), + name.to_string(), + referrer, + referrer_kind, + false, + true, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } else { + let mut best_match = ""; + let mut best_match_subpath = None; + for key in imports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..=pattern_index]; + if name.starts_with(key_sub) { + let pattern_trailer = &key[pattern_index + 1..]; + if name.len() > key.len() + && name.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + name[pattern_index..=(name.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = imports.get(best_match).unwrap().to_owned(); + let maybe_resolved = self.resolve_package_target( + package_json_path.as_ref().unwrap(), + target, + best_match_subpath.unwrap(), + 
best_match.to_string(), + referrer, + referrer_kind, + true, + true, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } + } + } + } + } + + Err(throw_import_not_defined( + name, + package_json_path.as_deref(), + referrer, + )) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target_string( + &self, + target: String, + subpath: String, + match_: String, + package_json_path: &Path, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result { + if !subpath.is_empty() && !pattern && !target.ends_with('/') { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + let invalid_segment_re = + lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); + let pattern_re = lazy_regex::regex!(r"\*"); + if !target.starts_with("./") { + if internal && !target.starts_with("../") && !target.starts_with('/') { + let is_url = Url::parse(&target).is_ok(); + if !is_url { + let export_target = if pattern { + pattern_re + .replace(&target, |_caps: ®ex::Captures| subpath.clone()) + .to_string() + } else { + format!("{target}{subpath}") + }; + let package_json_url = + ModuleSpecifier::from_file_path(package_json_path).unwrap(); + return match self.package_resolve( + &export_target, + &package_json_url, + referrer_kind, + conditions, + mode, + permissions, + ) { + Ok(Some(path)) => Ok(path), + Ok(None) => Err(generic_error("not found")), + Err(err) => Err(err), + }; + } + } + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + if invalid_segment_re.is_match(&target[2..]) { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + let package_path = 
package_json_path.parent().unwrap(); + let resolved_path = package_path.join(&target).clean(); + if !resolved_path.starts_with(package_path) { + return Err(throw_invalid_package_target( + match_, + target, + package_json_path, + internal, + referrer, + )); + } + if subpath.is_empty() { + return Ok(resolved_path); + } + if invalid_segment_re.is_match(&subpath) { + let request = if pattern { + match_.replace('*', &subpath) + } else { + format!("{match_}{subpath}") + }; + return Err(throw_invalid_subpath( + request, + package_json_path, + internal, + referrer, + )); + } + if pattern { + let resolved_path_str = resolved_path.to_string_lossy(); + let replaced = pattern_re + .replace(&resolved_path_str, |_caps: ®ex::Captures| { + subpath.clone() + }); + return Ok(PathBuf::from(replaced.to_string())); + } + Ok(resolved_path.join(&subpath).clean()) + } + + #[allow(clippy::too_many_arguments)] + fn resolve_package_target( + &self, + package_json_path: &Path, + target: Value, + subpath: String, + package_subpath: String, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + pattern: bool, + internal: bool, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + if let Some(target) = target.as_str() { + return self + .resolve_package_target_string( + target.to_string(), + subpath, + package_subpath, + package_json_path, + referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + permissions, + ) + .map(|path| { + if mode.is_types() { + self.path_to_declaration_path(path, referrer_kind) + } else { + Some(path) + } + }); + } else if let Some(target_arr) = target.as_array() { + if target_arr.is_empty() { + return Ok(None); + } + + let mut last_error = None; + for target_item in target_arr { + let resolved_result = self.resolve_package_target( + package_json_path, + target_item.to_owned(), + subpath.clone(), + package_subpath.clone(), + referrer, + referrer_kind, + pattern, + internal, + 
conditions, + mode, + permissions, + ); + + match resolved_result { + Ok(Some(resolved)) => return Ok(Some(resolved)), + Ok(None) => { + last_error = None; + continue; + } + Err(e) => { + let err_string = e.to_string(); + last_error = Some(e); + if err_string.starts_with("[ERR_INVALID_PACKAGE_TARGET]") { + continue; + } + return Err(last_error.unwrap()); + } + } + } + if last_error.is_none() { + return Ok(None); + } + return Err(last_error.unwrap()); + } else if let Some(target_obj) = target.as_object() { + for key in target_obj.keys() { + // TODO(bartlomieju): verify that keys are not numeric + // return Err(errors::err_invalid_package_config( + // to_file_path_string(package_json_url), + // Some(base.as_str().to_string()), + // Some("\"exports\" cannot contain numeric property keys.".to_string()), + // )); + + if key == "default" + || conditions.contains(&key.as_str()) + || mode.is_types() && key.as_str() == "types" + { + let condition_target = target_obj.get(key).unwrap().to_owned(); + + let resolved = self.resolve_package_target( + package_json_path, + condition_target, + subpath.clone(), + package_subpath.clone(), + referrer, + referrer_kind, + pattern, + internal, + conditions, + mode, + permissions, + )?; + match resolved { + Some(resolved) => return Ok(Some(resolved)), + None => { + continue; + } + } + } + } + } else if target.is_null() { + return Ok(None); + } + + Err(throw_invalid_package_target( + package_subpath, + target.to_string(), + package_json_path, + internal, + referrer, + )) + } + + #[allow(clippy::too_many_arguments)] + pub fn package_exports_resolve( + &self, + package_json_path: &Path, + package_subpath: String, + package_exports: &Map, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result { + if package_exports.contains_key(&package_subpath) + && package_subpath.find('*').is_none() + && !package_subpath.ends_with('/') + { + let 
target = package_exports.get(&package_subpath).unwrap().to_owned(); + let resolved = self.resolve_package_target( + package_json_path, + target, + "".to_string(), + package_subpath.to_string(), + referrer, + referrer_kind, + false, + false, + conditions, + mode, + permissions, + )?; + if resolved.is_none() { + return Err(throw_exports_not_found( + package_subpath, + package_json_path, + referrer, + )); + } + return Ok(resolved.unwrap()); + } + + let mut best_match = ""; + let mut best_match_subpath = None; + for key in package_exports.keys() { + let pattern_index = key.find('*'); + if let Some(pattern_index) = pattern_index { + let key_sub = &key[0..pattern_index]; + if package_subpath.starts_with(key_sub) { + // When this reaches EOL, this can throw at the top of the whole function: + // + // if (StringPrototypeEndsWith(packageSubpath, '/')) + // throwInvalidSubpath(packageSubpath) + // + // To match "imports" and the spec. + if package_subpath.ends_with('/') { + // TODO(bartlomieju): + // emitTrailingSlashPatternDeprecation(); + } + let pattern_trailer = &key[pattern_index + 1..]; + if package_subpath.len() > key.len() + && package_subpath.ends_with(&pattern_trailer) + && pattern_key_compare(best_match, key) == 1 + && key.rfind('*') == Some(pattern_index) + { + best_match = key; + best_match_subpath = Some( + package_subpath[pattern_index + ..(package_subpath.len() - pattern_trailer.len())] + .to_string(), + ); + } + } + } + } + + if !best_match.is_empty() { + let target = package_exports.get(best_match).unwrap().to_owned(); + let maybe_resolved = self.resolve_package_target( + package_json_path, + target, + best_match_subpath.unwrap(), + best_match.to_string(), + referrer, + referrer_kind, + true, + false, + conditions, + mode, + permissions, + )?; + if let Some(resolved) = maybe_resolved { + return Ok(resolved); + } else { + return Err(throw_exports_not_found( + package_subpath, + package_json_path, + referrer, + )); + } + } + + Err(throw_exports_not_found( + 
package_subpath, + package_json_path, + referrer, + )) + } + + pub(super) fn package_resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + referrer_kind: NodeModuleKind, + conditions: &[&str], + mode: NodeResolutionMode, + permissions: &mut dyn NodePermissions, + ) -> Result, AnyError> { + let (package_name, package_subpath, _is_scoped) = + parse_package_name(specifier, referrer)?; + + // ResolveSelf + let package_config = + self.get_package_scope_config(referrer, permissions)?; + if package_config.exists + && package_config.name.as_ref() == Some(&package_name) + { + if let Some(exports) = &package_config.exports { + return self + .package_exports_resolve( + &package_config.path, + package_subpath, + exports, + referrer, + referrer_kind, + conditions, + mode, + permissions, + ) + .map(Some); + } + } + + let package_dir_path = self + .npm_resolver + .resolve_package_folder_from_package(&package_name, referrer, mode)?; + let package_json_path = package_dir_path.join("package.json"); + + // todo: error with this instead when can't find package + // Err(errors::err_module_not_found( + // &package_json_url + // .join(".") + // .unwrap() + // .to_file_path() + // .unwrap() + // .display() + // .to_string(), + // &to_file_path_string(referrer), + // "package", + // )) + + // Package match. + let package_json = + self.load_package_json(permissions, package_json_path)?; + if let Some(exports) = &package_json.exports { + return self + .package_exports_resolve( + &package_json.path, + package_subpath, + exports, + referrer, + referrer_kind, + conditions, + mode, + permissions, + ) + .map(Some); + } + if package_subpath == "." 
{ + return self.legacy_main_resolve(&package_json, referrer_kind, mode); + } + + let file_path = package_json.path.parent().unwrap().join(&package_subpath); + + if mode.is_types() { + let maybe_declaration_path = + self.path_to_declaration_path(file_path, referrer_kind); + Ok(maybe_declaration_path) + } else { + Ok(Some(file_path)) + } + } + + pub(super) fn get_package_scope_config( + &self, + referrer: &ModuleSpecifier, + permissions: &mut dyn NodePermissions, + ) -> Result { + let root_folder = self + .npm_resolver + .resolve_package_folder_from_path(&referrer.to_file_path().unwrap())?; + let package_json_path = root_folder.join("package.json"); + self.load_package_json(permissions, package_json_path) + } + + pub(super) fn get_closest_package_json( + &self, + url: &ModuleSpecifier, + permissions: &mut dyn NodePermissions, + ) -> Result { + let package_json_path = self.get_closest_package_json_path(url)?; + self.load_package_json(permissions, package_json_path) + } + + fn get_closest_package_json_path( + &self, + url: &ModuleSpecifier, + ) -> Result { + let file_path = url.to_file_path().unwrap(); + let mut current_dir = file_path.parent().unwrap(); + let package_json_path = current_dir.join("package.json"); + if self.fs.exists(&package_json_path) { + return Ok(package_json_path); + } + let root_pkg_folder = self + .npm_resolver + .resolve_package_folder_from_path(&url.to_file_path().unwrap())?; + while current_dir.starts_with(&root_pkg_folder) { + current_dir = current_dir.parent().unwrap(); + let package_json_path = current_dir.join("package.json"); + if self.fs.exists(&package_json_path) { + return Ok(package_json_path); + } + } + + bail!("did not find package.json in {}", root_pkg_folder.display()) + } + + pub(super) fn load_package_json( + &self, + permissions: &mut dyn NodePermissions, + package_json_path: PathBuf, + ) -> Result { + PackageJson::load( + &*self.fs, + &*self.npm_resolver, + permissions, + package_json_path, + ) + } + + pub(super) fn 
legacy_main_resolve( + &self, + package_json: &PackageJson, + referrer_kind: NodeModuleKind, + mode: NodeResolutionMode, + ) -> Result, AnyError> { + let maybe_main = if mode.is_types() { + match package_json.types.as_ref() { + Some(types) => Some(types), + None => { + // fallback to checking the main entrypoint for + // a corresponding declaration file + if let Some(main) = package_json.main(referrer_kind) { + let main = package_json.path.parent().unwrap().join(main).clean(); + if let Some(path) = + self.path_to_declaration_path(main, referrer_kind) + { + return Ok(Some(path)); + } + } + None + } + } + } else { + package_json.main(referrer_kind) + }; + + if let Some(main) = maybe_main { + let guess = package_json.path.parent().unwrap().join(main).clean(); + if self.fs.is_file(&guess) { + return Ok(Some(guess)); + } + + // todo(dsherret): investigate exactly how node and typescript handles this + let endings = if mode.is_types() { + match referrer_kind { + NodeModuleKind::Cjs => { + vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] + } + NodeModuleKind::Esm => vec![ + ".d.ts", + ".d.mts", + "/index.d.ts", + "/index.d.mts", + ".d.cts", + "/index.d.cts", + ], + } + } else { + vec![".js", "/index.js"] + }; + for ending in endings { + let guess = package_json + .path + .parent() + .unwrap() + .join(format!("{main}{ending}")) + .clean(); + if self.fs.is_file(&guess) { + // TODO(bartlomieju): emitLegacyIndexDeprecation() + return Ok(Some(guess)); + } + } + } + + let index_file_names = if mode.is_types() { + // todo(dsherret): investigate exactly how typescript does this + match referrer_kind { + NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], + NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], + } + } else { + vec!["index.js"] + }; + for index_file_name in index_file_names { + let guess = package_json + .path + .parent() + .unwrap() + .join(index_file_name) + .clean(); + if self.fs.is_file(&guess) { + // TODO(bartlomieju): 
emitLegacyIndexDeprecation() + return Ok(Some(guess)); + } + } + + Ok(None) + } +} + +fn resolve_bin_entry_value<'a>( + pkg_nv: &NpmPackageNv, + bin_name: Option<&str>, + bin: &'a Value, +) -> Result<&'a str, AnyError> { + let bin_entry = match bin { + Value::String(_) => { + if bin_name.is_some() && bin_name.unwrap() != pkg_nv.name { + None + } else { + Some(bin) + } + } + Value::Object(o) => { + if let Some(bin_name) = bin_name { + o.get(bin_name) + } else if o.len() == 1 || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) { + o.values().next() + } else { + o.get(&pkg_nv.name) + } + }, + _ => bail!("package '{}' did not have a bin property with a string or object value in its package.json", pkg_nv), + }; + let bin_entry = match bin_entry { + Some(e) => e, + None => { + let keys = bin + .as_object() + .map(|o| { + o.keys() + .map(|k| format!(" * npm:{pkg_nv}/{k}")) + .collect::>() + }) + .unwrap_or_default(); + bail!( + "package '{}' did not have a bin entry for '{}' in its package.json{}", + pkg_nv, + bin_name.unwrap_or(&pkg_nv.name), + if keys.is_empty() { + "".to_string() + } else { + format!("\n\nPossibilities:\n{}", keys.join("\n")) + } + ) + } + }; + match bin_entry { + Value::String(s) => Ok(s), + _ => bail!( + "package '{}' had a non-string sub property of bin in its package.json", + pkg_nv, + ), + } +} + +fn to_file_path(url: &ModuleSpecifier) -> PathBuf { + url + .to_file_path() + .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) +} + +fn to_file_path_string(url: &ModuleSpecifier) -> String { + to_file_path(url).display().to_string() +} + +fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool { + if specifier.is_empty() { + return false; + } + + if specifier.starts_with('/') { + return true; + } + + is_relative_specifier(specifier) +} + +// TODO(ry) We very likely have this utility function elsewhere in Deno. 
+fn is_relative_specifier(specifier: &str) -> bool { + let specifier_len = specifier.len(); + let specifier_chars: Vec<_> = specifier.chars().collect(); + + if !specifier_chars.is_empty() && specifier_chars[0] == '.' { + if specifier_len == 1 || specifier_chars[1] == '/' { + return true; + } + if specifier_chars[1] == '.' + && (specifier_len == 2 || specifier_chars[2] == '/') + { + return true; + } + } + false } /// Alternate `PathBuf::with_extension` that will handle known extensions /// more intelligently. -pub fn with_known_extension(path: &Path, ext: &str) -> PathBuf { +fn with_known_extension(path: &Path, ext: &str) -> PathBuf { const NON_DECL_EXTS: &[&str] = &["cjs", "js", "json", "jsx", "mjs", "tsx"]; const DECL_EXTS: &[&str] = &["cts", "mts", "ts"]; @@ -142,145 +1356,6 @@ fn throw_import_not_defined( ) } -fn pattern_key_compare(a: &str, b: &str) -> i32 { - let a_pattern_index = a.find('*'); - let b_pattern_index = b.find('*'); - - let base_len_a = if let Some(index) = a_pattern_index { - index + 1 - } else { - a.len() - }; - let base_len_b = if let Some(index) = b_pattern_index { - index + 1 - } else { - b.len() - }; - - if base_len_a > base_len_b { - return -1; - } - - if base_len_b > base_len_a { - return 1; - } - - if a_pattern_index.is_none() { - return 1; - } - - if b_pattern_index.is_none() { - return -1; - } - - if a.len() > b.len() { - return -1; - } - - if b.len() > a.len() { - return 1; - } - - 0 -} - -pub fn package_imports_resolve( - name: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if name == "#" || name.starts_with("#/") || name.ends_with('/') { - let reason = "is not a valid internal imports specifier name"; - return Err(errors::err_invalid_module_specifier( - name, - reason, - Some(to_specifier_display_string(referrer)), - )); - } - - let package_config = - 
get_package_scope_config::(referrer, npm_resolver, permissions)?; - let mut package_json_path = None; - if package_config.exists { - package_json_path = Some(package_config.path.clone()); - if let Some(imports) = &package_config.imports { - if imports.contains_key(name) && !name.contains('*') { - let maybe_resolved = resolve_package_target::( - package_json_path.as_ref().unwrap(), - imports.get(name).unwrap().to_owned(), - "".to_string(), - name.to_string(), - referrer, - referrer_kind, - false, - true, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } else { - let mut best_match = ""; - let mut best_match_subpath = None; - for key in imports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..=pattern_index]; - if name.starts_with(key_sub) { - let pattern_trailer = &key[pattern_index + 1..]; - if name.len() > key.len() - && name.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - name[pattern_index..=(name.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = imports.get(best_match).unwrap().to_owned(); - let maybe_resolved = resolve_package_target::( - package_json_path.as_ref().unwrap(), - target, - best_match_subpath.unwrap(), - best_match.to_string(), - referrer, - referrer_kind, - true, - true, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } - } - } - } - } - - Err(throw_import_not_defined( - name, - package_json_path.as_deref(), - referrer, - )) -} - fn throw_invalid_package_target( subpath: String, target: String, @@ -316,245 +1391,6 @@ fn throw_invalid_subpath( ) } -#[allow(clippy::too_many_arguments)] -fn resolve_package_target_string( - target: 
String, - subpath: String, - match_: String, - package_json_path: &Path, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if !subpath.is_empty() && !pattern && !target.ends_with('/') { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - let invalid_segment_re = - lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)"); - let pattern_re = lazy_regex::regex!(r"\*"); - if !target.starts_with("./") { - if internal && !target.starts_with("../") && !target.starts_with('/') { - let is_url = Url::parse(&target).is_ok(); - if !is_url { - let export_target = if pattern { - pattern_re - .replace(&target, |_caps: ®ex::Captures| subpath.clone()) - .to_string() - } else { - format!("{target}{subpath}") - }; - let package_json_url = - ModuleSpecifier::from_file_path(package_json_path).unwrap(); - return match package_resolve::( - &export_target, - &package_json_url, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) { - Ok(Some(path)) => Ok(path), - Ok(None) => Err(generic_error("not found")), - Err(err) => Err(err), - }; - } - } - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - if invalid_segment_re.is_match(&target[2..]) { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - let package_path = package_json_path.parent().unwrap(); - let resolved_path = package_path.join(&target).clean(); - if !resolved_path.starts_with(package_path) { - return Err(throw_invalid_package_target( - match_, - target, - package_json_path, - internal, - referrer, - )); - } - if subpath.is_empty() { - return Ok(resolved_path); - } - if invalid_segment_re.is_match(&subpath) { - let 
request = if pattern { - match_.replace('*', &subpath) - } else { - format!("{match_}{subpath}") - }; - return Err(throw_invalid_subpath( - request, - package_json_path, - internal, - referrer, - )); - } - if pattern { - let resolved_path_str = resolved_path.to_string_lossy(); - let replaced = pattern_re - .replace(&resolved_path_str, |_caps: ®ex::Captures| { - subpath.clone() - }); - return Ok(PathBuf::from(replaced.to_string())); - } - Ok(resolved_path.join(&subpath).clean()) -} - -#[allow(clippy::too_many_arguments)] -fn resolve_package_target( - package_json_path: &Path, - target: Value, - subpath: String, - package_subpath: String, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - pattern: bool, - internal: bool, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result, AnyError> { - if let Some(target) = target.as_str() { - return resolve_package_target_string::( - target.to_string(), - subpath, - package_subpath, - package_json_path, - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(|path| { - if mode.is_types() { - path_to_declaration_path::(path, referrer_kind) - } else { - Some(path) - } - }); - } else if let Some(target_arr) = target.as_array() { - if target_arr.is_empty() { - return Ok(None); - } - - let mut last_error = None; - for target_item in target_arr { - let resolved_result = resolve_package_target::( - package_json_path, - target_item.to_owned(), - subpath.clone(), - package_subpath.clone(), - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - ); - - match resolved_result { - Ok(Some(resolved)) => return Ok(Some(resolved)), - Ok(None) => { - last_error = None; - continue; - } - Err(e) => { - let err_string = e.to_string(); - last_error = Some(e); - if err_string.starts_with("[ERR_INVALID_PACKAGE_TARGET]") { - continue; - } - return 
Err(last_error.unwrap()); - } - } - } - if last_error.is_none() { - return Ok(None); - } - return Err(last_error.unwrap()); - } else if let Some(target_obj) = target.as_object() { - for key in target_obj.keys() { - // TODO(bartlomieju): verify that keys are not numeric - // return Err(errors::err_invalid_package_config( - // to_file_path_string(package_json_url), - // Some(base.as_str().to_string()), - // Some("\"exports\" cannot contain numeric property keys.".to_string()), - // )); - - if key == "default" - || conditions.contains(&key.as_str()) - || mode.is_types() && key.as_str() == "types" - { - let condition_target = target_obj.get(key).unwrap().to_owned(); - - let resolved = resolve_package_target::( - package_json_path, - condition_target, - subpath.clone(), - package_subpath.clone(), - referrer, - referrer_kind, - pattern, - internal, - conditions, - mode, - npm_resolver, - permissions, - )?; - match resolved { - Some(resolved) => return Ok(Some(resolved)), - None => { - continue; - } - } - } - } - } else if target.is_null() { - return Ok(None); - } - - Err(throw_invalid_package_target( - package_subpath, - target.to_string(), - package_json_path, - internal, - referrer, - )) -} - fn throw_exports_not_found( subpath: String, package_json_path: &Path, @@ -567,115 +1403,6 @@ fn throw_exports_not_found( ) } -#[allow(clippy::too_many_arguments)] -pub fn package_exports_resolve( - package_json_path: &Path, - package_subpath: String, - package_exports: &Map, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - if package_exports.contains_key(&package_subpath) - && package_subpath.find('*').is_none() - && !package_subpath.ends_with('/') - { - let target = package_exports.get(&package_subpath).unwrap().to_owned(); - let resolved = resolve_package_target::( - package_json_path, - target, - "".to_string(), - 
package_subpath.to_string(), - referrer, - referrer_kind, - false, - false, - conditions, - mode, - npm_resolver, - permissions, - )?; - if resolved.is_none() { - return Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )); - } - return Ok(resolved.unwrap()); - } - - let mut best_match = ""; - let mut best_match_subpath = None; - for key in package_exports.keys() { - let pattern_index = key.find('*'); - if let Some(pattern_index) = pattern_index { - let key_sub = &key[0..pattern_index]; - if package_subpath.starts_with(key_sub) { - // When this reaches EOL, this can throw at the top of the whole function: - // - // if (StringPrototypeEndsWith(packageSubpath, '/')) - // throwInvalidSubpath(packageSubpath) - // - // To match "imports" and the spec. - if package_subpath.ends_with('/') { - // TODO(bartlomieju): - // emitTrailingSlashPatternDeprecation(); - } - let pattern_trailer = &key[pattern_index + 1..]; - if package_subpath.len() > key.len() - && package_subpath.ends_with(&pattern_trailer) - && pattern_key_compare(best_match, key) == 1 - && key.rfind('*') == Some(pattern_index) - { - best_match = key; - best_match_subpath = Some( - package_subpath - [pattern_index..(package_subpath.len() - pattern_trailer.len())] - .to_string(), - ); - } - } - } - } - - if !best_match.is_empty() { - let target = package_exports.get(best_match).unwrap().to_owned(); - let maybe_resolved = resolve_package_target::( - package_json_path, - target, - best_match_subpath.unwrap(), - best_match.to_string(), - referrer, - referrer_kind, - true, - false, - conditions, - mode, - npm_resolver, - permissions, - )?; - if let Some(resolved) = maybe_resolved { - return Ok(resolved); - } else { - return Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )); - } - } - - Err(throw_exports_not_found( - package_subpath, - package_json_path, - referrer, - )) -} - fn parse_package_name( specifier: &str, referrer: &ModuleSpecifier, @@ 
-727,229 +1454,153 @@ fn parse_package_name( Ok((package_name, package_subpath, is_scoped)) } -pub fn package_resolve( - specifier: &str, - referrer: &ModuleSpecifier, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result, AnyError> { - let (package_name, package_subpath, _is_scoped) = - parse_package_name(specifier, referrer)?; +fn pattern_key_compare(a: &str, b: &str) -> i32 { + let a_pattern_index = a.find('*'); + let b_pattern_index = b.find('*'); - // ResolveSelf - let package_config = - get_package_scope_config::(referrer, npm_resolver, permissions)?; - if package_config.exists - && package_config.name.as_ref() == Some(&package_name) - { - if let Some(exports) = &package_config.exports { - return package_exports_resolve::( - &package_config.path, - package_subpath, - exports, - referrer, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(Some); - } - } - - let package_dir_path = npm_resolver.resolve_package_folder_from_package( - &package_name, - referrer, - mode, - )?; - let package_json_path = package_dir_path.join("package.json"); - - // todo: error with this instead when can't find package - // Err(errors::err_module_not_found( - // &package_json_url - // .join(".") - // .unwrap() - // .to_file_path() - // .unwrap() - // .display() - // .to_string(), - // &to_file_path_string(referrer), - // "package", - // )) - - // Package match. - let package_json = - PackageJson::load::(npm_resolver, permissions, package_json_path)?; - if let Some(exports) = &package_json.exports { - return package_exports_resolve::( - &package_json.path, - package_subpath, - exports, - referrer, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ) - .map(Some); - } - if package_subpath == "." 
{ - return legacy_main_resolve::(&package_json, referrer_kind, mode); - } - - let file_path = package_json.path.parent().unwrap().join(&package_subpath); - - if mode.is_types() { - let maybe_declaration_path = - path_to_declaration_path::(file_path, referrer_kind); - Ok(maybe_declaration_path) + let base_len_a = if let Some(index) = a_pattern_index { + index + 1 } else { - Ok(Some(file_path)) - } -} - -pub fn get_package_scope_config( - referrer: &ModuleSpecifier, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - let root_folder = npm_resolver - .resolve_package_folder_from_path(&referrer.to_file_path().unwrap())?; - let package_json_path = root_folder.join("package.json"); - PackageJson::load::(npm_resolver, permissions, package_json_path) -} - -pub fn get_closest_package_json( - url: &ModuleSpecifier, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result { - let package_json_path = - get_closest_package_json_path::(url, npm_resolver)?; - PackageJson::load::(npm_resolver, permissions, package_json_path) -} - -fn get_closest_package_json_path( - url: &ModuleSpecifier, - npm_resolver: &dyn NpmResolver, -) -> Result { - let file_path = url.to_file_path().unwrap(); - let mut current_dir = file_path.parent().unwrap(); - let package_json_path = current_dir.join("package.json"); - if Fs::exists(&package_json_path) { - return Ok(package_json_path); - } - let root_pkg_folder = npm_resolver - .resolve_package_folder_from_path(&url.to_file_path().unwrap())?; - while current_dir.starts_with(&root_pkg_folder) { - current_dir = current_dir.parent().unwrap(); - let package_json_path = current_dir.join("package.json"); - if Fs::exists(&package_json_path) { - return Ok(package_json_path); - } - } - - bail!("did not find package.json in {}", root_pkg_folder.display()) -} - -pub fn legacy_main_resolve( - package_json: &PackageJson, - referrer_kind: NodeModuleKind, - mode: NodeResolutionMode, -) -> Result, 
AnyError> { - let maybe_main = if mode.is_types() { - match package_json.types.as_ref() { - Some(types) => Some(types), - None => { - // fallback to checking the main entrypoint for - // a corresponding declaration file - if let Some(main) = package_json.main(referrer_kind) { - let main = package_json.path.parent().unwrap().join(main).clean(); - if let Some(path) = - path_to_declaration_path::(main, referrer_kind) - { - return Ok(Some(path)); - } - } - None - } - } + a.len() + }; + let base_len_b = if let Some(index) = b_pattern_index { + index + 1 } else { - package_json.main(referrer_kind) + b.len() }; - if let Some(main) = maybe_main { - let guess = package_json.path.parent().unwrap().join(main).clean(); - if Fs::is_file(&guess) { - return Ok(Some(guess)); - } - - // todo(dsherret): investigate exactly how node and typescript handles this - let endings = if mode.is_types() { - match referrer_kind { - NodeModuleKind::Cjs => { - vec![".d.ts", ".d.cts", "/index.d.ts", "/index.d.cts"] - } - NodeModuleKind::Esm => vec![ - ".d.ts", - ".d.mts", - "/index.d.ts", - "/index.d.mts", - ".d.cts", - "/index.d.cts", - ], - } - } else { - vec![".js", "/index.js"] - }; - for ending in endings { - let guess = package_json - .path - .parent() - .unwrap() - .join(format!("{main}{ending}")) - .clean(); - if Fs::is_file(&guess) { - // TODO(bartlomieju): emitLegacyIndexDeprecation() - return Ok(Some(guess)); - } - } + if base_len_a > base_len_b { + return -1; } - let index_file_names = if mode.is_types() { - // todo(dsherret): investigate exactly how typescript does this - match referrer_kind { - NodeModuleKind::Cjs => vec!["index.d.ts", "index.d.cts"], - NodeModuleKind::Esm => vec!["index.d.ts", "index.d.mts", "index.d.cts"], - } - } else { - vec!["index.js"] - }; - for index_file_name in index_file_names { - let guess = package_json - .path - .parent() - .unwrap() - .join(index_file_name) - .clean(); - if Fs::is_file(&guess) { - // TODO(bartlomieju): emitLegacyIndexDeprecation() - 
return Ok(Some(guess)); - } + if base_len_b > base_len_a { + return 1; } - Ok(None) + if a_pattern_index.is_none() { + return 1; + } + + if b_pattern_index.is_none() { + return -1; + } + + if a.len() > b.len() { + return -1; + } + + if b.len() > a.len() { + return 1; + } + + 0 } #[cfg(test)] mod tests { + use deno_core::serde_json::json; + use super::*; + #[test] + fn test_resolve_bin_entry_value() { + // should resolve the specified value + let value = json!({ + "bin1": "./value1", + "bin2": "./value2", + "test": "./value3", + }); + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + Some("bin1"), + &value + ) + .unwrap(), + "./value1" + ); + + // should resolve the value with the same name when not specified + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + None, + &value + ) + .unwrap(), + "./value3" + ); + + // should not resolve when specified value does not exist + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.1.1").unwrap(), + Some("other"), + &value + ) + .err() + .unwrap() + .to_string(), + concat!( + "package 'test@1.1.1' did not have a bin entry for 'other' in its package.json\n", + "\n", + "Possibilities:\n", + " * npm:test@1.1.1/bin1\n", + " * npm:test@1.1.1/bin2\n", + " * npm:test@1.1.1/test" + ) + ); + + // should not resolve when default value can't be determined + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("asdf@1.2.3").unwrap(), + None, + &value + ) + .err() + .unwrap() + .to_string(), + concat!( + "package 'asdf@1.2.3' did not have a bin entry for 'asdf' in its package.json\n", + "\n", + "Possibilities:\n", + " * npm:asdf@1.2.3/bin1\n", + " * npm:asdf@1.2.3/bin2\n", + " * npm:asdf@1.2.3/test" + ) + ); + + // should resolve since all the values are the same + let value = json!({ + "bin1": "./value", + "bin2": "./value", + }); + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.2.3").unwrap(), + 
None, + &value + ) + .unwrap(), + "./value" + ); + + // should not resolve when specified and is a string + let value = json!("./value"); + assert_eq!( + resolve_bin_entry_value( + &NpmPackageNv::from_str("test@1.2.3").unwrap(), + Some("path"), + &value + ) + .err() + .unwrap() + .to_string(), + "package 'test@1.2.3' did not have a bin entry for 'path' in its package.json" + ); + } + #[test] fn test_parse_package_name() { let dummy_referrer = Url::parse("http://example.com").unwrap(); diff --git a/ext/node/resolver.rs b/ext/node/resolver.rs deleted file mode 100644 index 41e1cf4d4d..0000000000 --- a/ext/node/resolver.rs +++ /dev/null @@ -1,686 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -use std::path::Path; -use std::path::PathBuf; - -use deno_core::anyhow::bail; -use deno_core::anyhow::Context; -use deno_core::error::generic_error; -use deno_core::error::AnyError; -use deno_core::serde_json::Value; -use deno_core::url::Url; -use deno_core::ModuleSpecifier; -use deno_media_type::MediaType; -use deno_semver::npm::NpmPackageNv; -use deno_semver::npm::NpmPackageNvReference; -use deno_semver::npm::NpmPackageReqReference; - -use crate::errors; -use crate::get_closest_package_json; -use crate::legacy_main_resolve; -use crate::package_exports_resolve; -use crate::package_imports_resolve; -use crate::package_resolve; -use crate::path_to_declaration_path; -use crate::AllowAllNodePermissions; -use crate::NodeFs; -use crate::NodeModuleKind; -use crate::NodePermissions; -use crate::NodeResolutionMode; -use crate::NpmResolver; -use crate::PackageJson; -use crate::DEFAULT_CONDITIONS; - -#[derive(Debug)] -pub enum NodeResolution { - Esm(ModuleSpecifier), - CommonJs(ModuleSpecifier), - BuiltIn(String), -} - -impl NodeResolution { - pub fn into_url(self) -> ModuleSpecifier { - match self { - Self::Esm(u) => u, - Self::CommonJs(u) => u, - Self::BuiltIn(specifier) => { - if specifier.starts_with("node:") { - 
ModuleSpecifier::parse(&specifier).unwrap() - } else { - ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap() - } - } - } - } - - pub fn into_specifier_and_media_type( - resolution: Option, - ) -> (ModuleSpecifier, MediaType) { - match resolution { - Some(NodeResolution::CommonJs(specifier)) => { - let media_type = MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, - MediaType::Dts => MediaType::Dcts, - _ => media_type, - }, - ) - } - Some(NodeResolution::Esm(specifier)) => { - let media_type = MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, - MediaType::Dts => MediaType::Dmts, - _ => media_type, - }, - ) - } - Some(resolution) => (resolution.into_url(), MediaType::Dts), - None => ( - ModuleSpecifier::parse("internal:///missing_dependency.d.ts").unwrap(), - MediaType::Dts, - ), - } - } -} - -#[derive(Debug)] -pub struct NodeResolver { - npm_resolver: TRequireNpmResolver, -} - -impl NodeResolver { - pub fn new(require_npm_resolver: TRequireNpmResolver) -> Self { - Self { - npm_resolver: require_npm_resolver, - } - } - - pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(specifier) - } - - /// This function is an implementation of `defaultResolve` in - /// `lib/internal/modules/esm/resolve.js` from Node. 
- pub fn resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, - ) -> Result, AnyError> { - // Note: if we are here, then the referrer is an esm module - // TODO(bartlomieju): skipped "policy" part as we don't plan to support it - - if crate::is_builtin_node_module(specifier) { - return Ok(Some(NodeResolution::BuiltIn(specifier.to_string()))); - } - - if let Ok(url) = Url::parse(specifier) { - if url.scheme() == "data" { - return Ok(Some(NodeResolution::Esm(url))); - } - - let protocol = url.scheme(); - - if protocol == "node" { - let split_specifier = url.as_str().split(':'); - let specifier = split_specifier.skip(1).collect::(); - - if crate::is_builtin_node_module(&specifier) { - return Ok(Some(NodeResolution::BuiltIn(specifier))); - } - } - - if protocol != "file" && protocol != "data" { - return Err(errors::err_unsupported_esm_url_scheme(&url)); - } - - // todo(dsherret): this seems wrong - if referrer.scheme() == "data" { - let url = referrer.join(specifier).map_err(AnyError::from)?; - return Ok(Some(NodeResolution::Esm(url))); - } - } - - let url = self.module_resolve::( - specifier, - referrer, - DEFAULT_CONDITIONS, - mode, - permissions, - )?; - let url = match url { - Some(url) => url, - None => return Ok(None), - }; - let url = match mode { - NodeResolutionMode::Execution => url, - NodeResolutionMode::Types => { - let path = url.to_file_path().unwrap(); - // todo(16370): the module kind is not correct here. I think we need - // typescript to tell us if the referrer is esm or cjs - let path = - match path_to_declaration_path::(path, NodeModuleKind::Esm) { - Some(path) => path, - None => return Ok(None), - }; - ModuleSpecifier::from_file_path(path).unwrap() - } - }; - - let resolve_response = self.url_to_node_resolution::(url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. 
- Ok(Some(resolve_response)) - } - - fn module_resolve( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - conditions: &[&str], - mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, - ) -> Result, AnyError> { - // note: if we're here, the referrer is an esm module - let url = if should_be_treated_as_relative_or_absolute_path(specifier) { - let resolved_specifier = referrer.join(specifier)?; - if mode.is_types() { - let file_path = to_file_path(&resolved_specifier); - // todo(dsherret): the node module kind is not correct and we - // should use the value provided by typescript instead - let declaration_path = - path_to_declaration_path::(file_path, NodeModuleKind::Esm); - declaration_path.map(|declaration_path| { - ModuleSpecifier::from_file_path(declaration_path).unwrap() - }) - } else { - Some(resolved_specifier) - } - } else if specifier.starts_with('#') { - Some( - package_imports_resolve::( - specifier, - referrer, - NodeModuleKind::Esm, - conditions, - mode, - &self.npm_resolver, - permissions, - ) - .map(|p| ModuleSpecifier::from_file_path(p).unwrap())?, - ) - } else if let Ok(resolved) = Url::parse(specifier) { - Some(resolved) - } else { - package_resolve::( - specifier, - referrer, - NodeModuleKind::Esm, - conditions, - mode, - &self.npm_resolver, - permissions, - )? 
- .map(|p| ModuleSpecifier::from_file_path(p).unwrap()) - }; - Ok(match url { - Some(url) => Some(finalize_resolution::(url, referrer)?), - None => None, - }) - } - - pub fn resolve_npm_req_reference( - &self, - reference: &NpmPackageReqReference, - mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, - ) -> Result, AnyError> { - let reference = self - .npm_resolver - .resolve_nv_ref_from_pkg_req_ref(reference)?; - self.resolve_npm_reference::(&reference, mode, permissions) - } - - pub fn resolve_npm_reference( - &self, - reference: &NpmPackageNvReference, - mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, - ) -> Result, AnyError> { - let package_folder = self - .npm_resolver - .resolve_package_folder_from_deno_module(&reference.nv)?; - let node_module_kind = NodeModuleKind::Esm; - let maybe_resolved_path = package_config_resolve::( - &reference - .sub_path - .as_ref() - .map(|s| format!("./{s}")) - .unwrap_or_else(|| ".".to_string()), - &package_folder, - node_module_kind, - DEFAULT_CONDITIONS, - mode, - &self.npm_resolver, - permissions, - ) - .with_context(|| { - format!("Error resolving package config for '{reference}'") - })?; - let resolved_path = match maybe_resolved_path { - Some(resolved_path) => resolved_path, - None => return Ok(None), - }; - let resolved_path = match mode { - NodeResolutionMode::Execution => resolved_path, - NodeResolutionMode::Types => { - match path_to_declaration_path::(resolved_path, node_module_kind) { - Some(path) => path, - None => return Ok(None), - } - } - }; - let url = ModuleSpecifier::from_file_path(resolved_path).unwrap(); - let resolve_response = self.url_to_node_resolution::(url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. 
- Ok(Some(resolve_response)) - } - - pub fn resolve_binary_commands( - &self, - pkg_nv: &NpmPackageNv, - ) -> Result, AnyError> { - let package_folder = self - .npm_resolver - .resolve_package_folder_from_deno_module(pkg_nv)?; - let package_json_path = package_folder.join("package.json"); - let package_json = PackageJson::load::( - &self.npm_resolver, - &mut AllowAllNodePermissions, - package_json_path, - )?; - - Ok(match package_json.bin { - Some(Value::String(_)) => vec![pkg_nv.name.to_string()], - Some(Value::Object(o)) => { - o.into_iter().map(|(key, _)| key).collect::>() - } - _ => Vec::new(), - }) - } - - pub fn resolve_binary_export( - &self, - pkg_ref: &NpmPackageReqReference, - ) -> Result { - let pkg_nv = self - .npm_resolver - .resolve_pkg_id_from_pkg_req(&pkg_ref.req)? - .nv; - let bin_name = pkg_ref.sub_path.as_deref(); - let package_folder = self - .npm_resolver - .resolve_package_folder_from_deno_module(&pkg_nv)?; - let package_json_path = package_folder.join("package.json"); - let package_json = PackageJson::load::( - &self.npm_resolver, - &mut AllowAllNodePermissions, - package_json_path, - )?; - let bin = match &package_json.bin { - Some(bin) => bin, - None => bail!( - "package '{}' did not have a bin property in its package.json", - &pkg_nv.name, - ), - }; - let bin_entry = resolve_bin_entry_value(&pkg_nv, bin_name, bin)?; - let url = - ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap(); - - let resolve_response = self.url_to_node_resolution::(url)?; - // TODO(bartlomieju): skipped checking errors for commonJS resolution and - // "preserveSymlinksMain"/"preserveSymlinks" options. 
- Ok(resolve_response) - } - - pub fn url_to_node_resolution( - &self, - url: ModuleSpecifier, - ) -> Result { - let url_str = url.as_str().to_lowercase(); - if url_str.starts_with("http") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { - let package_config = get_closest_package_json::( - &url, - &self.npm_resolver, - &mut AllowAllNodePermissions, - )?; - if package_config.typ == "module" { - Ok(NodeResolution::Esm(url)) - } else { - Ok(NodeResolution::CommonJs(url)) - } - } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".ts") { - Err(generic_error(format!( - "TypeScript files are not supported in npm packages: {url}" - ))) - } else { - Ok(NodeResolution::CommonJs(url)) - } - } -} - -fn resolve_bin_entry_value<'a>( - pkg_nv: &NpmPackageNv, - bin_name: Option<&str>, - bin: &'a Value, -) -> Result<&'a str, AnyError> { - let bin_entry = match bin { - Value::String(_) => { - if bin_name.is_some() && bin_name.unwrap() != pkg_nv.name { - None - } else { - Some(bin) - } - } - Value::Object(o) => { - if let Some(bin_name) = bin_name { - o.get(bin_name) - } else if o.len() == 1 || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) { - o.values().next() - } else { - o.get(&pkg_nv.name) - } - }, - _ => bail!("package '{}' did not have a bin property with a string or object value in its package.json", pkg_nv), - }; - let bin_entry = match bin_entry { - Some(e) => e, - None => { - let keys = bin - .as_object() - .map(|o| { - o.keys() - .map(|k| format!(" * npm:{pkg_nv}/{k}")) - .collect::>() - }) - .unwrap_or_default(); - bail!( - "package '{}' did not have a bin entry for '{}' in its package.json{}", - pkg_nv, - bin_name.unwrap_or(&pkg_nv.name), - if keys.is_empty() { - "".to_string() - } else { - format!("\n\nPossibilities:\n{}", keys.join("\n")) - } - ) - } - }; - match bin_entry { - Value::String(s) => Ok(s), - _ => bail!( - 
"package '{}' had a non-string sub property of bin in its package.json", - pkg_nv, - ), - } -} - -fn package_config_resolve( - package_subpath: &str, - package_dir: &Path, - referrer_kind: NodeModuleKind, - conditions: &[&str], - mode: NodeResolutionMode, - npm_resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, -) -> Result, AnyError> { - let package_json_path = package_dir.join("package.json"); - let referrer = ModuleSpecifier::from_directory_path(package_dir).unwrap(); - let package_config = PackageJson::load::( - npm_resolver, - permissions, - package_json_path.clone(), - )?; - if let Some(exports) = &package_config.exports { - let result = package_exports_resolve::( - &package_json_path, - package_subpath.to_string(), - exports, - &referrer, - referrer_kind, - conditions, - mode, - npm_resolver, - permissions, - ); - match result { - Ok(found) => return Ok(Some(found)), - Err(exports_err) => { - if mode.is_types() && package_subpath == "." { - if let Ok(Some(path)) = - legacy_main_resolve::(&package_config, referrer_kind, mode) - { - return Ok(Some(path)); - } else { - return Ok(None); - } - } - return Err(exports_err); - } - } - } - if package_subpath == "." { - return legacy_main_resolve::(&package_config, referrer_kind, mode); - } - - Ok(Some(package_dir.join(package_subpath))) -} - -fn finalize_resolution( - resolved: ModuleSpecifier, - base: &ModuleSpecifier, -) -> Result { - let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C"); - - if encoded_sep_re.is_match(resolved.path()) { - return Err(errors::err_invalid_module_specifier( - resolved.path(), - "must not include encoded \"/\" or \"\\\\\" characters", - Some(to_file_path_string(base)), - )); - } - - let path = to_file_path(&resolved); - - // TODO(bartlomieju): currently not supported - // if (getOptionValue('--experimental-specifier-resolution') === 'node') { - // ... 
- // } - - let p_str = path.to_str().unwrap(); - let p = if p_str.ends_with('/') { - p_str[p_str.len() - 1..].to_string() - } else { - p_str.to_string() - }; - - let (is_dir, is_file) = if let Ok(stats) = Fs::metadata(p) { - (stats.is_dir, stats.is_file) - } else { - (false, false) - }; - if is_dir { - return Err(errors::err_unsupported_dir_import( - resolved.as_str(), - base.as_str(), - )); - } else if !is_file { - return Err(errors::err_module_not_found( - resolved.as_str(), - base.as_str(), - "module", - )); - } - - Ok(resolved) -} - -fn to_file_path(url: &ModuleSpecifier) -> PathBuf { - url - .to_file_path() - .unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}")) -} - -fn to_file_path_string(url: &ModuleSpecifier) -> String { - to_file_path(url).display().to_string() -} - -fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool { - if specifier.is_empty() { - return false; - } - - if specifier.starts_with('/') { - return true; - } - - is_relative_specifier(specifier) -} - -// TODO(ry) We very likely have this utility function elsewhere in Deno. -fn is_relative_specifier(specifier: &str) -> bool { - let specifier_len = specifier.len(); - let specifier_chars: Vec<_> = specifier.chars().collect(); - - if !specifier_chars.is_empty() && specifier_chars[0] == '.' { - if specifier_len == 1 || specifier_chars[1] == '/' { - return true; - } - if specifier_chars[1] == '.' 
- && (specifier_len == 2 || specifier_chars[2] == '/') - { - return true; - } - } - false -} - -#[cfg(test)] -mod tests { - use deno_core::serde_json::json; - - use super::*; - - #[test] - fn test_resolve_bin_entry_value() { - // should resolve the specified value - let value = json!({ - "bin1": "./value1", - "bin2": "./value2", - "test": "./value3", - }); - assert_eq!( - resolve_bin_entry_value( - &NpmPackageNv::from_str("test@1.1.1").unwrap(), - Some("bin1"), - &value - ) - .unwrap(), - "./value1" - ); - - // should resolve the value with the same name when not specified - assert_eq!( - resolve_bin_entry_value( - &NpmPackageNv::from_str("test@1.1.1").unwrap(), - None, - &value - ) - .unwrap(), - "./value3" - ); - - // should not resolve when specified value does not exist - assert_eq!( - resolve_bin_entry_value( - &NpmPackageNv::from_str("test@1.1.1").unwrap(), - Some("other"), - &value - ) - .err() - .unwrap() - .to_string(), - concat!( - "package 'test@1.1.1' did not have a bin entry for 'other' in its package.json\n", - "\n", - "Possibilities:\n", - " * npm:test@1.1.1/bin1\n", - " * npm:test@1.1.1/bin2\n", - " * npm:test@1.1.1/test" - ) - ); - - // should not resolve when default value can't be determined - assert_eq!( - resolve_bin_entry_value( - &NpmPackageNv::from_str("asdf@1.2.3").unwrap(), - None, - &value - ) - .err() - .unwrap() - .to_string(), - concat!( - "package 'asdf@1.2.3' did not have a bin entry for 'asdf' in its package.json\n", - "\n", - "Possibilities:\n", - " * npm:asdf@1.2.3/bin1\n", - " * npm:asdf@1.2.3/bin2\n", - " * npm:asdf@1.2.3/test" - ) - ); - - // should resolve since all the values are the same - let value = json!({ - "bin1": "./value", - "bin2": "./value", - }); - assert_eq!( - resolve_bin_entry_value( - &NpmPackageNv::from_str("test@1.2.3").unwrap(), - None, - &value - ) - .unwrap(), - "./value" - ); - - // should not resolve when specified and is a string - let value = json!("./value"); - assert_eq!( - resolve_bin_entry_value( - 
&NpmPackageNv::from_str("test@1.2.3").unwrap(), - Some("path"), - &value - ) - .err() - .unwrap() - .to_string(), - "package 'test@1.2.3' did not have a bin entry for 'path' in its package.json" - ); - } -} diff --git a/runtime/build.rs b/runtime/build.rs index d47bee9419..358a30dc59 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -222,7 +222,6 @@ mod startup_snapshot { impl deno_node::NodeEnv for SnapshotNodeEnv { type P = Permissions; - type Fs = deno_node::RealFs; } deno_core::extension!(runtime, @@ -324,7 +323,7 @@ mod startup_snapshot { runtime::init_ops_and_esm(), // FIXME(bartlomieju): these extensions are specified last, because they // depend on `runtime`, even though it should be other way around - deno_node::deno_node::init_ops_and_esm::(None), + deno_node::deno_node::init_ops_and_esm::(None, None), #[cfg(not(feature = "snapshot_from_snapshot"))] runtime_main::init_ops_and_esm(), ]; diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs index f44c774fbb..f97c045b26 100644 --- a/runtime/examples/hello_runtime.rs +++ b/runtime/examples/hello_runtime.rs @@ -43,6 +43,7 @@ async fn main() -> Result<(), AnyError> { should_break_on_first_statement: false, should_wait_for_inspector_session: false, module_loader, + node_fs: None, npm_resolver: None, get_error_class_fn: Some(&get_error_class_name), cache_storage_dir: None, diff --git a/runtime/lib.rs b/runtime/lib.rs index 6745c4a565..878171913f 100644 --- a/runtime/lib.rs +++ b/runtime/lib.rs @@ -39,5 +39,4 @@ pub use worker_bootstrap::BootstrapOptions; pub struct RuntimeNodeEnv; impl deno_node::NodeEnv for RuntimeNodeEnv { type P = permissions::PermissionsContainer; - type Fs = deno_node::RealFs; } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 06540a9bbf..9bc5ba011f 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -37,7 +37,6 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use 
deno_node::NpmResolver; use deno_tls::rustls::RootCertStore; use deno_web::create_entangled_message_port; use deno_web::BlobStore; @@ -333,7 +332,8 @@ pub struct WebWorkerOptions { pub root_cert_store: Option, pub seed: Option, pub module_loader: Rc, - pub npm_resolver: Option>, + pub node_fs: Option>, + pub npm_resolver: Option>, pub create_web_worker_cb: Arc, pub preload_module_cb: Arc, pub pre_execute_module_cb: Arc, @@ -444,6 +444,7 @@ impl WebWorker { deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), deno_node::deno_node::init_ops::( options.npm_resolver, + options.node_fs, ), // Runtime ops that are always initialized for WebWorkers ops::web_worker::deno_web_worker::init_ops(), diff --git a/runtime/worker.rs b/runtime/worker.rs index 5cd60604d2..56684e9925 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -33,7 +33,6 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_node::NpmResolver; use deno_tls::rustls::RootCertStore; use deno_web::BlobStore; use log::debug; @@ -94,7 +93,8 @@ pub struct WorkerOptions { /// If not provided runtime will error if code being /// executed tries to load modules. 
pub module_loader: Rc, - pub npm_resolver: Option>, + pub node_fs: Option>, + pub npm_resolver: Option>, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, pub web_worker_preload_module_cb: Arc, @@ -164,6 +164,7 @@ impl Default for WorkerOptions { broadcast_channel: Default::default(), source_map_getter: Default::default(), root_cert_store: Default::default(), + node_fs: Default::default(), npm_resolver: Default::default(), blob_store: Default::default(), extensions: Default::default(), @@ -268,6 +269,7 @@ impl MainWorker { deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), deno_node::deno_node::init_ops::( options.npm_resolver, + options.node_fs, ), // Ops from this crate ops::runtime::deno_runtime::init_ops(main_module.clone()), From 667acb075c0f1fb62bc1177f46b550e9dd506c00 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Tue, 25 Apr 2023 00:52:27 +0100 Subject: [PATCH 030/320] fix(lsp): show dependency errors for repeated imports (#18807) --- Cargo.lock | 4 +- cli/Cargo.toml | 2 +- cli/lsp/diagnostics.rs | 184 +++++++++++++++++++++++++++++++---------- 3 files changed, 142 insertions(+), 48 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 666741cf5d..e2480d54c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -982,9 +982,9 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.48.0" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57683392402015acc8f20cc3623035f6b2a2c49f1728eef93536c712adafb2c2" +checksum = "dcdbc17bfe49a41dd596ba2a96106b3eae3bd0812e1b63a6fe5883166c1b6fef" dependencies = [ "anyhow", "data-url", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index ebd8583304..a806b70934 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -44,7 +44,7 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_gra deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_doc = "0.62.0" deno_emit 
= "0.20.0" -deno_graph = "=0.48.0" +deno_graph = "=0.48.1" deno_lint = { version = "0.44.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 965075a2d7..b650d8e558 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -857,24 +857,24 @@ impl DenoDiagnostic { } fn diagnose_resolution( - diagnostics: &mut Vec, + lsp_diagnostics: &mut Vec, snapshot: &language_server::StateSnapshot, resolution: &Resolution, is_dynamic: bool, maybe_assert_type: Option<&str>, + ranges: Vec, ) { + let mut diagnostics = vec![]; match resolution { Resolution::Ok(resolved) => { let specifier = &resolved.specifier; - let range = documents::to_lsp_range(&resolved.range); // If the module is a remote module and has a `X-Deno-Warning` header, we // want a warning diagnostic with that message. if let Some(metadata) = snapshot.cache_metadata.get(specifier) { if let Some(message) = metadata.get(&cache::MetadataKey::Warning).cloned() { - diagnostics - .push(DenoDiagnostic::DenoWarn(message).to_lsp_diagnostic(&range)); + diagnostics.push(DenoDiagnostic::DenoWarn(message)); } } if let Some(doc) = snapshot.documents.get(specifier) { @@ -883,13 +883,10 @@ fn diagnose_resolution( // diagnostic that indicates this. This then allows us to issue a code // action to replace the specifier with the final redirected one. if doc_specifier != specifier { - diagnostics.push( - DenoDiagnostic::Redirect { - from: specifier.clone(), - to: doc_specifier.clone(), - } - .to_lsp_diagnostic(&range), - ); + diagnostics.push(DenoDiagnostic::Redirect { + from: specifier.clone(), + to: doc_specifier.clone(), + }); } if doc.media_type() == MediaType::Json { match maybe_assert_type { @@ -900,13 +897,10 @@ fn diagnose_resolution( // not provide a potentially incorrect diagnostic. 
None if is_dynamic => (), // The module has an incorrect assertion type, diagnostic - Some(assert_type) => diagnostics.push( - DenoDiagnostic::InvalidAssertType(assert_type.to_string()) - .to_lsp_diagnostic(&range), - ), + Some(assert_type) => diagnostics + .push(DenoDiagnostic::InvalidAssertType(assert_type.to_string())), // The module is missing an assertion type, diagnostic - None => diagnostics - .push(DenoDiagnostic::NoAssertType.to_lsp_diagnostic(&range)), + None => diagnostics.push(DenoDiagnostic::NoAssertType), } } } else if let Ok(pkg_ref) = @@ -918,19 +912,15 @@ fn diagnose_resolution( .resolve_pkg_id_from_pkg_req(&pkg_ref.req) .is_err() { - diagnostics.push( - DenoDiagnostic::NoCacheNpm(pkg_ref, specifier.clone()) - .to_lsp_diagnostic(&range), - ); + diagnostics + .push(DenoDiagnostic::NoCacheNpm(pkg_ref, specifier.clone())); } } } else if let Some(module_name) = specifier.as_str().strip_prefix("node:") { if deno_node::resolve_builtin_node_module(module_name).is_err() { - diagnostics.push( - DenoDiagnostic::InvalidNodeSpecifier(specifier.clone()) - .to_lsp_diagnostic(&range), - ); + diagnostics + .push(DenoDiagnostic::InvalidNodeSpecifier(specifier.clone())); } else if let Some(npm_resolver) = &snapshot.maybe_npm_resolver { // check that a @types/node package exists in the resolver let types_node_ref = @@ -939,13 +929,10 @@ fn diagnose_resolution( .resolve_pkg_id_from_pkg_req(&types_node_ref.req) .is_err() { - diagnostics.push( - DenoDiagnostic::NoCacheNpm( - types_node_ref, - ModuleSpecifier::parse("npm:@types/node").unwrap(), - ) - .to_lsp_diagnostic(&range), - ); + diagnostics.push(DenoDiagnostic::NoCacheNpm( + types_node_ref, + ModuleSpecifier::parse("npm:@types/node").unwrap(), + )); } } } else { @@ -958,17 +945,21 @@ fn diagnose_resolution( "blob" => DenoDiagnostic::NoCacheBlob, _ => DenoDiagnostic::NoCache(specifier.clone()), }; - diagnostics.push(deno_diagnostic.to_lsp_diagnostic(&range)); + diagnostics.push(deno_diagnostic); } } // The specifier 
resolution resulted in an error, so we want to issue a // diagnostic for that. - Resolution::Err(err) => diagnostics.push( - DenoDiagnostic::ResolutionError(*err.clone()) - .to_lsp_diagnostic(&documents::to_lsp_range(err.range())), - ), + Resolution::Err(err) => { + diagnostics.push(DenoDiagnostic::ResolutionError(*err.clone())) + } _ => (), } + for range in ranges { + for diagnostic in &diagnostics { + lsp_diagnostics.push(diagnostic.to_lsp_diagnostic(&range)); + } + } } /// Generate diagnostics related to a dependency. The dependency is analyzed to @@ -1005,17 +996,43 @@ fn diagnose_dependency( diagnose_resolution( diagnostics, snapshot, - &dependency.maybe_code, - dependency.is_dynamic, - dependency.maybe_assert_type.as_deref(), - ); - diagnose_resolution( - diagnostics, - snapshot, - &dependency.maybe_type, + if dependency.maybe_code.is_none() { + &dependency.maybe_type + } else { + &dependency.maybe_code + }, dependency.is_dynamic, dependency.maybe_assert_type.as_deref(), + dependency + .imports + .iter() + .map(|i| documents::to_lsp_range(&i.range)) + .collect(), ); + // TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has + // a different specifier and therefore needs a separate call to + // `diagnose_resolution()`. It would be much cleaner if that were modelled as + // a separate dependency: https://github.com/denoland/deno_graph/issues/247. 
+ if !dependency.maybe_type.is_none() + && !dependency + .imports + .iter() + .any(|i| dependency.maybe_type.includes(&i.range.start).is_some()) + { + let range = match &dependency.maybe_type { + Resolution::Ok(resolved) => documents::to_lsp_range(&resolved.range), + Resolution::Err(error) => documents::to_lsp_range(error.range()), + Resolution::None => unreachable!(), + }; + diagnose_resolution( + diagnostics, + snapshot, + &dependency.maybe_type, + dependency.is_dynamic, + dependency.maybe_assert_type.as_deref(), + vec![range], + ); + } } /// Generate diagnostics that come from Deno module resolution logic (like @@ -1376,4 +1393,81 @@ let c: number = "a"; }) ); } + + #[tokio::test] + async fn duplicate_diagnostics_for_duplicate_imports() { + let temp_dir = TempDir::new(); + let (snapshot, _) = setup( + &temp_dir, + &[( + "file:///a.ts", + r#" + // @deno-types="bad.d.ts" + import "bad.js"; + import "bad.js"; + "#, + 1, + LanguageId::TypeScript, + )], + None, + ); + let config = mock_config(); + let token = CancellationToken::new(); + let actual = generate_deno_diagnostics(&snapshot, &config, token).await; + assert_eq!(actual.len(), 1); + let (_, _, diagnostics) = actual.first().unwrap(); + assert_eq!( + json!(diagnostics), + json!([ + { + "range": { + "start": { + "line": 2, + "character": 15 + }, + "end": { + "line": 2, + "character": 23 + } + }, + "severity": 1, + "code": "import-prefix-missing", + "source": "deno", + "message": "Relative import path \"bad.js\" not prefixed with / or ./ or ../", + }, + { + "range": { + "start": { + "line": 3, + "character": 15 + }, + "end": { + "line": 3, + "character": 23 + } + }, + "severity": 1, + "code": "import-prefix-missing", + "source": "deno", + "message": "Relative import path \"bad.js\" not prefixed with / or ./ or ../", + }, + { + "range": { + "start": { + "line": 1, + "character": 23 + }, + "end": { + "line": 1, + "character": 33 + } + }, + "severity": 1, + "code": "import-prefix-missing", + "source": "deno", + 
"message": "Relative import path \"bad.d.ts\" not prefixed with / or ./ or ../", + }, + ]) + ); + } } From 5b4a9b48aeabded174cc161bfdafb6529cffbca1 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 24 Apr 2023 21:07:48 -0400 Subject: [PATCH 031/320] refactor(ext/node): enforce interior mutable for `NodePermissions` to remove clones (#18831) We can make `NodePermissions` rely on interior mutability (which the `PermissionsContainer` is already doing) in order to not have to clone everything all the time. This also reduces the chance of an accidental `borrow` while `borrrow_mut`. --- cli/lsp/documents.rs | 4 ++-- cli/module_loader.rs | 12 +++++------ cli/npm/resolvers/common.rs | 4 ++-- cli/npm/resolvers/global.rs | 2 +- cli/npm/resolvers/local.rs | 2 +- cli/npm/resolvers/mod.rs | 2 +- cli/tsc/mod.rs | 6 +++--- ext/node/lib.rs | 6 +++--- ext/node/ops/require.rs | 41 +++++++++++++++++-------------------- ext/node/package_json.rs | 2 +- ext/node/resolution.rs | 36 ++++++++++++++++---------------- runtime/build.rs | 5 +---- runtime/permissions/mod.rs | 2 +- 13 files changed, 59 insertions(+), 65 deletions(-) diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 31aa3ae8ef..3f77eaaa26 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -1074,7 +1074,7 @@ impl Documents { &specifier, referrer, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), ) .ok() .flatten(), @@ -1461,7 +1461,7 @@ fn node_resolve_npm_req_ref( .resolve_npm_req_reference( &npm_req_ref, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), ) .ok() .flatten(), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 5a7743ef26..0f0568272e 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -450,10 +450,10 @@ impl ModuleLoader for CliModuleLoader { referrer: &str, kind: ResolutionKind, ) -> Result { - let mut permissions = if matches!(kind, 
ResolutionKind::DynamicImport) { - self.dynamic_permissions.clone() + let permissions = if matches!(kind, ResolutionKind::DynamicImport) { + &self.dynamic_permissions } else { - self.root_permissions.clone() + &self.root_permissions }; // TODO(bartlomieju): ideally we shouldn't need to call `current_dir()` on each @@ -469,7 +469,7 @@ impl ModuleLoader for CliModuleLoader { specifier, referrer, NodeResolutionMode::Execution, - &mut permissions, + permissions, )) .with_context(|| { format!("Could not resolve '{specifier}' from '{referrer}'.") @@ -494,7 +494,7 @@ impl ModuleLoader for CliModuleLoader { self.node_resolver.resolve_npm_reference( &module.nv_reference, NodeResolutionMode::Execution, - &mut permissions, + permissions, ), ) .with_context(|| { @@ -556,7 +556,7 @@ impl ModuleLoader for CliModuleLoader { self.node_resolver.resolve_npm_req_reference( &reference, NodeResolutionMode::Execution, - &mut permissions, + permissions, ), ) .with_context(|| format!("Could not resolve '{reference}'.")); diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs index a31459a70a..ccba00d435 100644 --- a/cli/npm/resolvers/common.rs +++ b/cli/npm/resolvers/common.rs @@ -47,7 +47,7 @@ pub trait NpmPackageFsResolver: Send + Sync { fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: &Path, ) -> Result<(), AnyError>; } @@ -90,7 +90,7 @@ pub async fn cache_packages( } pub fn ensure_registry_read_permission( - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, registry_path: &Path, path: &Path, ) -> Result<(), AnyError> { diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs index 66935380fa..79df161875 100644 --- a/cli/npm/resolvers/global.rs +++ b/cli/npm/resolvers/global.rs @@ -126,7 +126,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, 
path: &Path, ) -> Result<(), AnyError> { let registry_path = self.cache.registry_folder(&self.registry_url); diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index e2919f6ee8..f5385c2f10 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -206,7 +206,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: &Path, ) -> Result<(), AnyError> { ensure_registry_read_permission( diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index fa83cdf59c..60402bd270 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -262,7 +262,7 @@ impl NpmResolver for CliNpmResolver { fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: &Path, ) -> Result<(), AnyError> { self.fs_resolver.ensure_read_permission(permissions, path) diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index aa589a1ca9..b77f39fd52 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -639,7 +639,7 @@ fn resolve_graph_specifier_types( let maybe_resolution = node_resolver.resolve_npm_reference( &module.nv_reference, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), )?; Ok(Some(NodeResolution::into_specifier_and_media_type( maybe_resolution, @@ -679,7 +679,7 @@ fn resolve_non_graph_specifier_types( specifier, referrer, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), ) .ok() .flatten(), @@ -692,7 +692,7 @@ fn resolve_non_graph_specifier_types( let maybe_resolution = node_resolver.resolve_npm_req_reference( &npm_ref, NodeResolutionMode::Types, - &mut PermissionsContainer::allow_all(), + &PermissionsContainer::allow_all(), )?; Ok(Some(NodeResolution::into_specifier_and_media_type( maybe_resolution, diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 
e63c73537f..84530423f3 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -44,13 +44,13 @@ pub trait NodeEnv { } pub trait NodePermissions { - fn check_read(&mut self, path: &Path) -> Result<(), AnyError>; + fn check_read(&self, path: &Path) -> Result<(), AnyError>; } pub(crate) struct AllowAllNodePermissions; impl NodePermissions for AllowAllNodePermissions { - fn check_read(&mut self, _path: &Path) -> Result<(), AnyError> { + fn check_read(&self, _path: &Path) -> Result<(), AnyError> { Ok(()) } } @@ -164,7 +164,7 @@ pub trait NpmResolver: std::fmt::Debug + Send + Sync { fn ensure_read_permission( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: &Path, ) -> Result<(), AnyError>; } diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 513b3f5899..34eac84759 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -32,11 +32,8 @@ fn ensure_read_permission

( where P: NodePermissions + 'static, { - let resolver = { - let resolver = state.borrow::>(); - resolver.clone() - }; - let permissions = state.borrow_mut::

(); + let resolver = state.borrow::>(); + let permissions = state.borrow::

(); resolver.ensure_read_permission(permissions, file_path) } @@ -98,7 +95,7 @@ pub fn op_require_node_module_paths( where Env: NodeEnv + 'static, { - let fs = state.borrow::>().clone(); + let fs = state.borrow::>(); // Guarantee that "from" is absolute. let from = deno_core::resolve_path( &from, @@ -267,7 +264,7 @@ where { let path = PathBuf::from(path); ensure_read_permission::(state, &path)?; - let fs = state.borrow::>().clone(); + let fs = state.borrow::>(); if let Ok(metadata) = fs.metadata(&path) { if metadata.is_file { return Ok(0); @@ -289,7 +286,7 @@ where { let path = PathBuf::from(request); ensure_read_permission::(state, &path)?; - let fs = state.borrow::>().clone(); + let fs = state.borrow::>(); let mut canonicalized_path = fs.canonicalize(&path)?; if cfg!(windows) { canonicalized_path = PathBuf::from( @@ -358,7 +355,7 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - let fs = state.borrow::>().clone(); + let fs = state.borrow::>(); if let Ok(cwd) = fs.current_dir() { ensure_read_permission::(state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); @@ -381,8 +378,8 @@ where return Ok(None); } - let node_resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::(); let pkg = node_resolver .get_package_scope_config( &Url::from_file_path(parent_path.unwrap()).unwrap(), @@ -441,7 +438,7 @@ where { let file_path = PathBuf::from(file_path); ensure_read_permission::(state, &file_path)?; - let fs = state.borrow::>().clone(); + let fs = state.borrow::>(); Ok(fs.read_to_string(&file_path)?) 
} @@ -469,10 +466,10 @@ fn op_require_resolve_exports( where Env: NodeEnv + 'static, { - let fs = state.borrow::>().clone(); - let npm_resolver = state.borrow::>().clone(); - let node_resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let fs = state.borrow::>(); + let npm_resolver = state.borrow::>(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::(); let pkg_path = if npm_resolver .in_npm_package_at_path(&PathBuf::from(&modules_path)) @@ -524,8 +521,8 @@ where state, PathBuf::from(&filename).parent().unwrap(), )?; - let node_resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::(); node_resolver.get_closest_package_json( &Url::from_file_path(filename).unwrap(), permissions, @@ -540,8 +537,8 @@ fn op_require_read_package_scope( where Env: NodeEnv + 'static, { - let node_resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::(); let package_json_path = PathBuf::from(package_json_path); node_resolver .load_package_json(permissions, package_json_path) @@ -559,8 +556,8 @@ where { let parent_path = PathBuf::from(&parent_filename); ensure_read_permission::(state, &parent_path)?; - let node_resolver = state.borrow::>().clone(); - let permissions = state.borrow_mut::(); + let node_resolver = state.borrow::>(); + let permissions = state.borrow::(); let pkg = node_resolver .load_package_json(permissions, parent_path.join("package.json"))?; diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs index 0e34897e3a..940e326312 100644 --- a/ext/node/package_json.rs +++ b/ext/node/package_json.rs @@ -65,7 +65,7 @@ impl PackageJson { pub fn load( fs: &dyn NodeFs, resolver: &dyn NpmResolver, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, path: PathBuf, ) -> Result { 
resolver.ensure_read_permission(permissions, &path)?; diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index e5db6b3ac7..0c90fffb6e 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -127,7 +127,7 @@ impl NodeResolver { specifier: &str, referrer: &ModuleSpecifier, mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { // Note: if we are here, then the referrer is an esm module // TODO(bartlomieju): skipped "policy" part as we don't plan to support it @@ -201,7 +201,7 @@ impl NodeResolver { referrer: &ModuleSpecifier, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { // note: if we're here, the referrer is an esm module let url = if should_be_treated_as_relative_or_absolute_path(specifier) { @@ -305,7 +305,7 @@ impl NodeResolver { &self, reference: &NpmPackageReqReference, mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { let reference = self .npm_resolver @@ -317,7 +317,7 @@ impl NodeResolver { &self, reference: &NpmPackageNvReference, mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { let package_folder = self .npm_resolver @@ -367,8 +367,8 @@ impl NodeResolver { .npm_resolver .resolve_package_folder_from_deno_module(pkg_nv)?; let package_json_path = package_folder.join("package.json"); - let package_json = self - .load_package_json(&mut AllowAllNodePermissions, package_json_path)?; + let package_json = + self.load_package_json(&AllowAllNodePermissions, package_json_path)?; Ok(match package_json.bin { Some(Value::String(_)) => vec![pkg_nv.name.to_string()], @@ -392,8 +392,8 @@ impl NodeResolver { .npm_resolver .resolve_package_folder_from_deno_module(&pkg_nv)?; let package_json_path = 
package_folder.join("package.json"); - let package_json = self - .load_package_json(&mut AllowAllNodePermissions, package_json_path)?; + let package_json = + self.load_package_json(&AllowAllNodePermissions, package_json_path)?; let bin = match &package_json.bin { Some(bin) => bin, None => bail!( @@ -420,7 +420,7 @@ impl NodeResolver { Ok(NodeResolution::Esm(url)) } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { let package_config = - self.get_closest_package_json(&url, &mut AllowAllNodePermissions)?; + self.get_closest_package_json(&url, &AllowAllNodePermissions)?; if package_config.typ == "module" { Ok(NodeResolution::Esm(url)) } else { @@ -444,7 +444,7 @@ impl NodeResolver { referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { let package_json_path = package_dir.join("package.json"); let referrer = ModuleSpecifier::from_directory_path(package_dir).unwrap(); @@ -537,7 +537,7 @@ impl NodeResolver { referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { if name == "#" || name.starts_with("#/") || name.ends_with('/') { let reason = "is not a valid internal imports specifier name"; @@ -638,7 +638,7 @@ impl NodeResolver { internal: bool, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { if !subpath.is_empty() && !pattern && !target.ends_with('/') { return Err(throw_invalid_package_target( @@ -747,7 +747,7 @@ impl NodeResolver { internal: bool, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { if let Some(target) = target.as_str() { return self @@ -871,7 +871,7 @@ impl NodeResolver { referrer_kind: NodeModuleKind, conditions: 
&[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { if package_exports.contains_key(&package_subpath) && package_subpath.find('*').is_none() @@ -975,7 +975,7 @@ impl NodeResolver { referrer_kind: NodeModuleKind, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result, AnyError> { let (package_name, package_subpath, _is_scoped) = parse_package_name(specifier, referrer)?; @@ -1055,7 +1055,7 @@ impl NodeResolver { pub(super) fn get_package_scope_config( &self, referrer: &ModuleSpecifier, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { let root_folder = self .npm_resolver @@ -1067,7 +1067,7 @@ impl NodeResolver { pub(super) fn get_closest_package_json( &self, url: &ModuleSpecifier, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { let package_json_path = self.get_closest_package_json_path(url)?; self.load_package_json(permissions, package_json_path) @@ -1099,7 +1099,7 @@ impl NodeResolver { pub(super) fn load_package_json( &self, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, package_json_path: PathBuf, ) -> Result { PackageJson::load( diff --git a/runtime/build.rs b/runtime/build.rs index 358a30dc59..2f3b125959 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -122,10 +122,7 @@ mod startup_snapshot { } impl deno_node::NodePermissions for Permissions { - fn check_read( - &mut self, - _p: &Path, - ) -> Result<(), deno_core::error::AnyError> { + fn check_read(&self, _p: &Path) -> Result<(), deno_core::error::AnyError> { unreachable!("snapshotting!") } } diff --git a/runtime/permissions/mod.rs b/runtime/permissions/mod.rs index b15750313e..6cce7d1e99 100644 --- a/runtime/permissions/mod.rs +++ b/runtime/permissions/mod.rs @@ -1872,7 +1872,7 @@ impl PermissionsContainer { impl 
deno_node::NodePermissions for PermissionsContainer { #[inline(always)] - fn check_read(&mut self, path: &Path) -> Result<(), AnyError> { + fn check_read(&self, path: &Path) -> Result<(), AnyError> { self.0.lock().read.check(path, None) } } From 63befe9377fcf688415633df76184fac5fe4ef73 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Tue, 25 Apr 2023 11:21:26 +0900 Subject: [PATCH 032/320] fix(ext/node): fix hash.flush (#18818) --- cli/tests/node_compat/config.jsonc | 2 + .../test/parallel/test-crypto-stream.js | 96 +++++++++++++++++++ ext/node/polyfills/internal/crypto/hash.ts | 2 +- tools/node_compat/TODO.md | 3 +- 4 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 cli/tests/node_compat/test/parallel/test-crypto-stream.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index fd7703d6b8..81463bcaf5 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -48,6 +48,7 @@ "test-crypto-hkdf.js", "test-crypto-hmac.js", "test-crypto-prime.js", + "test-crypto-stream.js", "test-crypto-x509.js", "test-dgram-custom-lookup.js", "test-dgram-ipv6only.js", @@ -242,6 +243,7 @@ "test-crypto-hmac.js", "test-crypto-prime.js", "test-crypto-secret-keygen.js", + "test-crypto-stream.js", "test-crypto-update-encoding.js", "test-crypto-x509.js", "test-dgram-close-during-bind.js", diff --git a/cli/tests/node_compat/test/parallel/test-crypto-stream.js b/cli/tests/node_compat/test/parallel/test-crypto-stream.js new file mode 100644 index 0000000000..07d3c5c6d7 --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-crypto-stream.js @@ -0,0 +1,96 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +// Copyright Joyent and Node contributors. All rights reserved. MIT license. +// Taken from Node 18.12.1 +// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually + +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const stream = require('stream'); +const crypto = require('crypto'); + +if (!common.hasFipsCrypto) { + // Small stream to buffer converter + class Stream2buffer extends stream.Writable { + constructor(callback) { + super(); + + this._buffers = []; + this.once('finish', function() { + callback(null, Buffer.concat(this._buffers)); + }); + } + + _write(data, encoding, done) { + this._buffers.push(data); + return done(null); + } + } + + // Create an md5 hash of "Hallo world" + const hasher1 = crypto.createHash('md5'); + hasher1.pipe(new Stream2buffer(common.mustCall(function end(err, hash) { + assert.strictEqual(err, null); + assert.strictEqual( + hash.toString('hex'), '06460dadb35d3d503047ce750ceb2d07' + ); + }))); + hasher1.end('Hallo world'); + + // Simpler check for unpipe, setEncoding, pause and resume + crypto.createHash('md5').unpipe({}); + crypto.createHash('md5').setEncoding('utf8'); + crypto.createHash('md5').pause(); + crypto.createHash('md5').resume(); +} + +// Decipher._flush() should emit an error event, not an exception. +const key = Buffer.from('48fb56eb10ffeb13fc0ef551bbca3b1b', 'hex'); +const badkey = Buffer.from('12341234123412341234123412341234', 'hex'); +const iv = Buffer.from('6d358219d1f488f5f4eb12820a66d146', 'hex'); +const cipher = crypto.createCipheriv('aes-128-cbc', key, iv); +const decipher = crypto.createDecipheriv('aes-128-cbc', badkey, iv); + +// TODO(kt3k): Align error message of decipher with wrong padding and +// enable the below test case. +/* +cipher.pipe(decipher) + .on('error', common.expectsError(common.hasOpenSSL3 ? 
{ + message: /bad decrypt/, + library: 'Provider routines', + reason: 'bad decrypt', + } : { + message: /bad decrypt/, + function: 'EVP_DecryptFinal_ex', + library: 'digital envelope routines', + reason: 'bad decrypt', + })); +*/ + +cipher.end('Papaya!'); // Should not cause an unhandled exception. diff --git a/ext/node/polyfills/internal/crypto/hash.ts b/ext/node/polyfills/internal/crypto/hash.ts index 00dfa19aff..34e3c1230e 100644 --- a/ext/node/polyfills/internal/crypto/hash.ts +++ b/ext/node/polyfills/internal/crypto/hash.ts @@ -66,7 +66,7 @@ export class Hash extends Transform { callback(); }, flush(callback: () => void) { - this.push(context.digest(undefined)); + this.push(this.digest(undefined)); callback(); }, }); diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index ae29d72e59..cca14b4997 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. 
-Total: 2924 +Total: 2923 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -504,7 +504,6 @@ Total: 2924 - [parallel/test-crypto-scrypt.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-scrypt.js) - [parallel/test-crypto-secure-heap.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-secure-heap.js) - [parallel/test-crypto-sign-verify.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-sign-verify.js) -- [parallel/test-crypto-stream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-stream.js) - [parallel/test-crypto-subtle-zero-length.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-subtle-zero-length.js) - [parallel/test-crypto-verify-failure.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-verify-failure.js) - [parallel/test-crypto-webcrypto-aes-decrypt-tag-too-small.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-webcrypto-aes-decrypt-tag-too-small.js) From 21c888d4dbe2175333cc9d58b227661e2d0185d8 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 25 Apr 2023 12:41:01 +0200 Subject: [PATCH 033/320] refactor(ext/http): comments for h2c code (#18833) --- ext/http/network_buffered_stream.rs | 30 ++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/ext/http/network_buffered_stream.rs b/ext/http/network_buffered_stream.rs index e4b2ee895d..bb128ba045 100644 --- a/ext/http/network_buffered_stream.rs +++ b/ext/http/network_buffered_stream.rs @@ -13,10 +13,19 @@ use tokio::io::ReadBuf; const MAX_PREFIX_SIZE: usize = 256; +/// [`NetworkStreamPrefixCheck`] is used to differentiate a stream between two different modes, depending +/// on whether the first bytes match a given 
prefix (or not). +/// +/// IMPORTANT: This stream makes the assumption that the incoming bytes will never partially match the prefix +/// and then "hang" waiting for a write. For this code not to hang, the incoming stream must: +/// +/// * match the prefix fully and then request writes at a later time +/// * not match the prefix, and then request writes after writing a byte that causes the prefix not to match +/// * not match the prefix and then close pub struct NetworkStreamPrefixCheck { + buffer: [MaybeUninit; MAX_PREFIX_SIZE * 2], io: S, prefix: &'static [u8], - buffer: [MaybeUninit; MAX_PREFIX_SIZE * 2], } impl NetworkStreamPrefixCheck { @@ -29,7 +38,7 @@ impl NetworkStreamPrefixCheck { } } - // Returns a [`NetworkBufferedStream`], rewound with the bytes we read to determine what + // Returns a [`NetworkBufferedStream`] and a flag determining if we matched a prefix, rewound with the bytes we read to determine what // type of stream this is. pub async fn match_prefix( self, @@ -95,15 +104,30 @@ impl NetworkStreamPrefixCheck { } } +/// [`NetworkBufferedStream`] is a stream that allows us to efficiently search for an incoming prefix in another stream without +/// reading too much data. If the stream detects that the prefix has definitely been matched, or definitely not been matched, +/// it returns a flag and a rewound stream allowing later code to take another pass at that data. +/// +/// [`NetworkBufferedStream`] is a custom wrapper around an asynchronous stream that implements AsyncRead +/// and AsyncWrite. It is designed to provide additional buffering functionality to the wrapped stream. +/// The primary use case for this struct is when you want to read a small amount of data from the beginning +/// of a stream, process it, and then continue reading the rest of the stream. 
+/// +/// While the bounds for the class are limited to [`AsyncRead`] for easier testing, it is far more useful to use +/// with interactive duplex streams that have a prefix determining which mode to operate in. For example, this class +/// can determine whether an incoming stream is HTTP/2 or non-HTTP/2 and allow downstream code to make that determination. pub struct NetworkBufferedStream { + prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], io: S, initialized_len: usize, prefix_offset: usize, - prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], + /// Have the prefix bytes been completely read out? prefix_read: bool, } impl NetworkBufferedStream { + /// This constructor is private, because passing partically initialized data between the [`NetworkStreamPrefixCheck`] and + /// this [`NetworkBufferedStream`] is challenging without the introduction of extra copies. fn new( io: S, prefix: [MaybeUninit; MAX_PREFIX_SIZE * 2], From 531754c35497568aa2f19179344eb9e205c9a4b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 25 Apr 2023 13:53:06 +0200 Subject: [PATCH 034/320] refactor(ext/websocket): use specialized ops (#18819) Instead of relying on `op_ws_send` to send different kinds of messages, use specialized ops everywhere. 
--- cli/js/40_testing.js | 5 ++++- ext/websocket/01_websocket.js | 8 +------ ext/websocket/02_websocketstream.js | 14 +++--------- ext/websocket/lib.rs | 33 ++++++++++++++++------------- 4 files changed, 26 insertions(+), 34 deletions(-) diff --git a/cli/js/40_testing.js b/cli/js/40_testing.js index a0dcaf4991..1464483563 100644 --- a/cli/js/40_testing.js +++ b/cli/js/40_testing.js @@ -128,7 +128,10 @@ const OP_DETAILS = { "op_ws_close": ["close a WebSocket", "awaiting until the `close` event is emitted on a `WebSocket`, or the `WebSocketStream#closed` promise resolves"], "op_ws_create": ["create a WebSocket", "awaiting until the `open` event is emitted on a `WebSocket`, or the result of a `WebSocketStream#connection` promise"], "op_ws_next_event": ["receive the next message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], - "op_ws_send": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], + "op_ws_send_text": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], + "op_ws_send_binary": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], + "op_ws_send_ping": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], + "op_ws_send_pong": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], }; // Wrap test function in additional assertion that makes sure diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 60378b6758..1b7a45ce0b 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -534,13 +534,7 @@ class WebSocket extends EventTarget { clearTimeout(this[_idleTimeoutTimeout]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { - await core.opAsync( - "op_ws_send", - this[_rid], - { - kind: "ping", - }, - ); + await core.opAsync("op_ws_send_ping", this[_rid]); this[_idleTimeoutTimeout] = setTimeout(async () => { if 
(this[_readyState] === OPEN) { this[_readyState] = CLOSING; diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 0d01e62eea..f545d7a99b 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -207,17 +207,11 @@ class WebSocketStream { const writable = new WritableStream({ write: async (chunk) => { if (typeof chunk === "string") { - await core.opAsync("op_ws_send", this[_rid], { - kind: "text", - value: chunk, - }); + await core.opAsync2("op_ws_send_text", this[_rid], chunk); } else if ( ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, chunk) ) { - await core.opAsync("op_ws_send", this[_rid], { - kind: "binary", - value: chunk, - }, chunk); + await core.opAsync2("op_ws_send_binary", this[_rid], chunk); } else { throw new TypeError( "A chunk may only be either a string or an Uint8Array", @@ -265,9 +259,7 @@ class WebSocketStream { } case 3: { /* ping */ - await core.opAsync("op_ws_send", this[_rid], { - kind: "pong", - }); + await core.opAsync("op_ws_send_pong", this[_rid]); await pull(controller); break; } diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 07cddc85bb..74898a471f 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -406,27 +406,29 @@ pub async fn op_ws_send_text( } #[op] -pub async fn op_ws_send( +pub async fn op_ws_send_ping( state: Rc>, rid: ResourceId, - value: SendValue, ) -> Result<(), AnyError> { - let msg = match value { - SendValue::Text(text) => { - Frame::new(true, OpCode::Text, None, text.into_bytes()) - } - SendValue::Binary(buf) => { - Frame::new(true, OpCode::Binary, None, buf.to_vec()) - } - SendValue::Pong => Frame::new(true, OpCode::Pong, None, vec![]), - SendValue::Ping => Frame::new(true, OpCode::Ping, None, vec![]), - }; - let resource = state .borrow_mut() .resource_table .get::(rid)?; - resource.write_frame(msg).await + resource + .write_frame(Frame::new(true, OpCode::Ping, None, vec![])) + .await +} + +#[op] +pub async fn 
op_ws_send_pong( + state: Rc>, + rid: ResourceId, +) -> Result<(), AnyError> { + let resource = state + .borrow_mut() + .resource_table + .get::(rid)?; + resource.write_frame(Frame::pong(vec![])).await } #[op(deferred)] @@ -521,11 +523,12 @@ deno_core::extension!(deno_websocket, ops = [ op_ws_check_permission_and_cancel_handle

, op_ws_create

, - op_ws_send, op_ws_close, op_ws_next_event, op_ws_send_binary, op_ws_send_text, + op_ws_send_ping, + op_ws_send_pong, op_ws_server_create, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], From 97820fe8abb15baabaf6b6eed632514867c7d97d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 25 Apr 2023 22:43:39 +0200 Subject: [PATCH 035/320] refactor(ext/kv): don't use bigint literals (#18841) This causes `DCHECK` fail in V8 when pointer compression is disabled. --- ext/kv/01_db.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 16099c2251..1a7b27dac7 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -278,8 +278,8 @@ class AtomicOperation { } } -const MIN_U64 = 0n; -const MAX_U64 = 0xffffffffffffffffn; +const MIN_U64 = BigInt("0"); +const MAX_U64 = BigInt("0xffffffffffffffff"); class KvU64 { readonly value: bigint; From 9b49de46446f3acb3081bfa809652a8a66d54bfb Mon Sep 17 00:00:00 2001 From: Kenta Moriuchi Date: Wed, 26 Apr 2023 07:36:22 +0900 Subject: [PATCH 036/320] fix(core): Wrap safe collections' argument of primordials (#18750) --- cli/tests/unit/websocket_test.ts | 22 ++++++++++++++++++++++ core/00_primordials.js | 24 ++++++++++++++++++++---- ext/console/02_console.js | 4 ++-- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/cli/tests/unit/websocket_test.ts b/cli/tests/unit/websocket_test.ts index 999eede414..795d5ebc18 100644 --- a/cli/tests/unit/websocket_test.ts +++ b/cli/tests/unit/websocket_test.ts @@ -147,3 +147,25 @@ Deno.test({ }; await Promise.all([promise, server]); }); + +Deno.test( + { sanitizeOps: false }, + function websocketConstructorWithPrototypePollusion() { + const originalSymbolIterator = Array.prototype[Symbol.iterator]; + try { + Array.prototype[Symbol.iterator] = () => { + throw Error("unreachable"); + }; + assertThrows(() => { + new WebSocket( + new URL("ws://localhost:4242/"), + // Allow `Symbol.iterator` to be called in 
WebIDL conversion to `sequence` + // deno-lint-ignore no-explicit-any + ["soap", "soap"].values() as any, + ); + }, DOMException); + } finally { + Array.prototype[Symbol.iterator] = originalSymbolIterator; + } + }, +); diff --git a/core/00_primordials.js b/core/00_primordials.js index f49a11de4b..60474e649d 100644 --- a/core/00_primordials.js +++ b/core/00_primordials.js @@ -405,7 +405,11 @@ Map, class SafeMap extends Map { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -413,7 +417,11 @@ WeakMap, class SafeWeakMap extends WeakMap { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -422,7 +430,11 @@ Set, class SafeSet extends Set { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); @@ -430,7 +442,11 @@ WeakSet, class SafeWeakSet extends WeakSet { constructor(i) { - super(i); + if (i == null) { + super(); + return; + } + super(new SafeArrayIterator(i)); } }, ); diff --git a/ext/console/02_console.js b/ext/console/02_console.js index 3e55efb749..5873a2ec2e 100644 --- a/ext/console/02_console.js +++ b/ext/console/02_console.js @@ -56,7 +56,7 @@ const { SafeArrayIterator, SafeMap, SafeStringIterator, - SafeSet, + SafeSetIterator, SafeRegExp, SetPrototype, SetPrototypeEntries, @@ -2158,7 +2158,7 @@ class Console { const indexKey = isSet || isMap ? 
"(iter idx)" : "(idx)"; if (isSet) { - resultData = [...new SafeSet(data)]; + resultData = [...new SafeSetIterator(data)]; } else if (isMap) { let idx = 0; resultData = {}; From 041d1e093bcac9a834afa1a2a036d07aef0ad1b2 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Tue, 25 Apr 2023 18:36:31 -0400 Subject: [PATCH 037/320] refactor(cli): extract out NpmModuleLoader from CliModuleLoader (#18842) Need to share this with the loader used in deno compile --- cli/module_loader.rs | 271 ++++++++++++++++++++++++++++--------------- ext/node/analyze.rs | 4 +- 2 files changed, 178 insertions(+), 97 deletions(-) diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 0f0568272e..9798983748 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -222,7 +222,7 @@ impl ModuleLoadPreparer { } } -struct ModuleCodeSource { +pub struct ModuleCodeSource { pub code: ModuleCode, pub found_url: ModuleSpecifier, pub media_type: MediaType, @@ -238,14 +238,12 @@ pub struct CliModuleLoader { /// "root permissions" for Web Worker. 
dynamic_permissions: PermissionsContainer, cli_options: Arc, - cjs_resolutions: Arc, emitter: Arc, graph_container: Arc, module_load_preparer: Arc, - node_code_translator: Arc, - node_resolver: Arc, parsed_source_cache: Arc, resolver: Arc, + npm_module_loader: NpmModuleLoader, } impl CliModuleLoader { @@ -259,14 +257,16 @@ impl CliModuleLoader { root_permissions, dynamic_permissions, cli_options: ps.options.clone(), - cjs_resolutions: ps.cjs_resolutions.clone(), emitter: ps.emitter.clone(), graph_container: ps.graph_container.clone(), module_load_preparer: ps.module_load_preparer.clone(), - node_code_translator: ps.node_code_translator.clone(), - node_resolver: ps.node_resolver.clone(), parsed_source_cache: ps.parsed_source_cache.clone(), resolver: ps.resolver.clone(), + npm_module_loader: NpmModuleLoader::new( + ps.cjs_resolutions.clone(), + ps.node_code_translator.clone(), + ps.node_resolver.clone(), + ), }) } @@ -280,14 +280,16 @@ impl CliModuleLoader { root_permissions, dynamic_permissions, cli_options: ps.options.clone(), - cjs_resolutions: ps.cjs_resolutions.clone(), emitter: ps.emitter.clone(), graph_container: ps.graph_container.clone(), module_load_preparer: ps.module_load_preparer.clone(), - node_code_translator: ps.node_code_translator.clone(), - node_resolver: ps.node_resolver.clone(), parsed_source_cache: ps.parsed_source_cache.clone(), resolver: ps.resolver.clone(), + npm_module_loader: NpmModuleLoader::new( + ps.cjs_resolutions.clone(), + ps.node_code_translator.clone(), + ps.node_resolver.clone(), + ), }) } @@ -367,41 +369,16 @@ impl CliModuleLoader { maybe_referrer: Option<&ModuleSpecifier>, is_dynamic: bool, ) -> Result { - let code_source = if self.node_resolver.in_npm_package(specifier) { - let file_path = specifier.to_file_path().unwrap(); - let code = std::fs::read_to_string(&file_path).with_context(|| { - let mut msg = "Unable to load ".to_string(); - msg.push_str(&file_path.to_string_lossy()); - if let Some(referrer) = &maybe_referrer { - 
msg.push_str(" imported from "); - msg.push_str(referrer.as_str()); - } - msg - })?; - - let code = if self.cjs_resolutions.contains(specifier) { - let mut permissions = if is_dynamic { - self.dynamic_permissions.clone() - } else { - self.root_permissions.clone() - }; - // translate cjs to esm if it's cjs and inject node globals - self.node_code_translator.translate_cjs_to_esm( - specifier, - &code, - &mut permissions, - )? - } else { - // only inject node globals for esm - self - .node_code_translator - .esm_code_with_node_globals(specifier, &code)? - }; - ModuleCodeSource { - code: code.into(), - found_url: specifier.clone(), - media_type: MediaType::from_specifier(specifier), - } + let permissions = if is_dynamic { + &self.dynamic_permissions + } else { + &self.root_permissions + }; + let code_source = if let Some(code_source) = self + .npm_module_loader + .load_sync(specifier, maybe_referrer, permissions)? + { + code_source } else { self.load_prepared_module(specifier, maybe_referrer)? }; @@ -424,23 +401,6 @@ impl CliModuleLoader { &code_source.found_url, )) } - - fn handle_node_resolve_result( - &self, - result: Result, AnyError>, - ) -> Result { - let response = match result? 
{ - Some(response) => response, - None => return Err(generic_error("not found")), - }; - if let NodeResolution::CommonJs(specifier) = &response { - // remember that this was a common js resolution - self.cjs_resolutions.insert(specifier.clone()); - } else if let NodeResolution::BuiltIn(specifier) = &response { - return deno_node::resolve_builtin_node_module(specifier); - } - Ok(response.into_url()) - } } impl ModuleLoader for CliModuleLoader { @@ -462,18 +422,12 @@ impl ModuleLoader for CliModuleLoader { let referrer_result = deno_core::resolve_url_or_path(referrer, &cwd); if let Ok(referrer) = referrer_result.as_ref() { - if self.node_resolver.in_npm_package(referrer) { - // we're in an npm package, so use node resolution - return self - .handle_node_resolve_result(self.node_resolver.resolve( - specifier, - referrer, - NodeResolutionMode::Execution, - permissions, - )) - .with_context(|| { - format!("Could not resolve '{specifier}' from '{referrer}'.") - }); + if let Some(result) = self.npm_module_loader.resolve_if_in_npm_package( + specifier, + referrer, + permissions, + ) { + return result; } let graph = self.graph_container.graph(); @@ -490,16 +444,8 @@ impl ModuleLoader for CliModuleLoader { return match graph.get(specifier) { Some(Module::Npm(module)) => self - .handle_node_resolve_result( - self.node_resolver.resolve_npm_reference( - &module.nv_reference, - NodeResolutionMode::Execution, - permissions, - ), - ) - .with_context(|| { - format!("Could not resolve '{}'.", module.nv_reference) - }), + .npm_module_loader + .resolve_npm_module(module, permissions), Some(Module::Node(module)) => { deno_node::resolve_builtin_node_module(&module.module_name) } @@ -552,14 +498,8 @@ impl ModuleLoader for CliModuleLoader { NpmPackageReqReference::from_specifier(&specifier) { return self - .handle_node_resolve_result( - self.node_resolver.resolve_npm_req_reference( - &reference, - NodeResolutionMode::Execution, - permissions, - ), - ) - .with_context(|| format!("Could not 
resolve '{reference}'.")); + .npm_module_loader + .resolve_for_repl(&reference, permissions); } } } @@ -590,9 +530,8 @@ impl ModuleLoader for CliModuleLoader { _maybe_referrer: Option, is_dynamic: bool, ) -> Pin>>> { - if self.node_resolver.in_npm_package(specifier) { - // nothing to prepare - return Box::pin(deno_core::futures::future::ready(Ok(()))); + if let Some(result) = self.npm_module_loader.maybe_prepare_load(specifier) { + return Box::pin(deno_core::futures::future::ready(result)); } let specifier = specifier.clone(); @@ -658,3 +597,145 @@ impl SourceMapGetter for CliModuleLoader { } } } + +pub struct NpmModuleLoader { + cjs_resolutions: Arc, + node_code_translator: Arc, + node_resolver: Arc, +} + +impl NpmModuleLoader { + pub fn new( + cjs_resolutions: Arc, + node_code_translator: Arc, + node_resolver: Arc, + ) -> Self { + Self { + cjs_resolutions, + node_code_translator, + node_resolver, + } + } + + pub fn resolve_if_in_npm_package( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + permissions: &PermissionsContainer, + ) -> Option> { + if self.node_resolver.in_npm_package(referrer) { + // we're in an npm package, so use node resolution + Some( + self + .handle_node_resolve_result(self.node_resolver.resolve( + specifier, + referrer, + NodeResolutionMode::Execution, + permissions, + )) + .with_context(|| { + format!("Could not resolve '{specifier}' from '{referrer}'.") + }), + ) + } else { + None + } + } + + pub fn resolve_npm_module( + &self, + module: &deno_graph::NpmModule, + permissions: &PermissionsContainer, + ) -> Result { + self + .handle_node_resolve_result(self.node_resolver.resolve_npm_reference( + &module.nv_reference, + NodeResolutionMode::Execution, + permissions, + )) + .with_context(|| format!("Could not resolve '{}'.", module.nv_reference)) + } + + pub fn resolve_for_repl( + &self, + reference: &NpmPackageReqReference, + permissions: &PermissionsContainer, + ) -> Result { + self + 
.handle_node_resolve_result(self.node_resolver.resolve_npm_req_reference( + reference, + NodeResolutionMode::Execution, + permissions, + )) + .with_context(|| format!("Could not resolve '{reference}'.")) + } + + pub fn maybe_prepare_load( + &self, + specifier: &ModuleSpecifier, + ) -> Option> { + if self.node_resolver.in_npm_package(specifier) { + // nothing to prepare + Some(Ok(())) + } else { + None + } + } + + pub fn load_sync( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + permissions: &PermissionsContainer, + ) -> Result, AnyError> { + if !self.node_resolver.in_npm_package(specifier) { + return Ok(None); + } + let file_path = specifier.to_file_path().unwrap(); + let code = std::fs::read_to_string(&file_path).with_context(|| { + let mut msg = "Unable to load ".to_string(); + msg.push_str(&file_path.to_string_lossy()); + if let Some(referrer) = &maybe_referrer { + msg.push_str(" imported from "); + msg.push_str(referrer.as_str()); + } + msg + })?; + + let code = if self.cjs_resolutions.contains(specifier) { + // translate cjs to esm if it's cjs and inject node globals + self.node_code_translator.translate_cjs_to_esm( + specifier, + &code, + permissions, + )? + } else { + // only inject node globals for esm + self + .node_code_translator + .esm_code_with_node_globals(specifier, &code)? + }; + Ok(Some(ModuleCodeSource { + code: code.into(), + found_url: specifier.clone(), + media_type: MediaType::from_specifier(specifier), + })) + } + + fn handle_node_resolve_result( + &self, + result: Result, AnyError>, + ) -> Result { + let response = match result? 
{ + Some(response) => response, + None => return Err(generic_error("not found")), + }; + if let NodeResolution::CommonJs(specifier) = &response { + // remember that this was a common js resolution + self.cjs_resolutions.insert(specifier.clone()); + } else if let NodeResolution::BuiltIn(specifier) = &response { + return deno_node::resolve_builtin_node_module(specifier); + } + Ok(response.into_url()) + } +} diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index f1af2f6110..c7181c4aca 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -113,7 +113,7 @@ impl &self, specifier: &ModuleSpecifier, source: &str, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { let mut temp_var_count = 0; let mut handled_reexports: HashSet = HashSet::default(); @@ -220,7 +220,7 @@ impl referrer: &ModuleSpecifier, conditions: &[&str], mode: NodeResolutionMode, - permissions: &mut dyn NodePermissions, + permissions: &dyn NodePermissions, ) -> Result { if specifier.starts_with('/') { todo!(); From 78b630d92a1a2dbec6054e592d978a89d09af7fa Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Wed, 26 Apr 2023 05:38:49 +0200 Subject: [PATCH 038/320] ci: switch release doc to mention dotcom (#18845) --- tools/release/release_doc_template.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tools/release/release_doc_template.md b/tools/release/release_doc_template.md index fe36d16777..c0eb9e2140 100644 --- a/tools/release/release_doc_template.md +++ b/tools/release/release_doc_template.md @@ -5,7 +5,7 @@ - Forks and local clones of [`denoland/deno`](https://github.com/denoland/deno/), [`denoland/deno_std`](https://github.com/denoland/deno_std/), - [`denoland/dotland`](https://github.com/denoland/dotland/), + [`denoland/dotcom`](https://github.com/denoland/dotcom/), [`denoland/deno_docker`](https://github.com/denoland/deno_docker/) [`denoland/manual`](https://github.com/denoland/manual/) @@ -163,15 +163,14 @@ verify on 
GitHub that everything looks correct. - [ ] Publish the release on Github - [ ] Run the - https://github.com/denoland/dotland/actions/workflows/update_versions.yml + https://github.com/denoland/dotcom/actions/workflows/update_versions.yml workflow. - [ ] This should open a PR. Review and merge it.

Failure Steps - 1. Update https://github.com/denoland/dotland/blob/main/versions.json - manually. + 1. Update https://github.com/denoland/dotcom/blob/main/versions.json manually. 2. Open a PR and merge.
From 106e4c475febe3e08fa7fc632e044fe75a792cc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 10:02:03 +0200 Subject: [PATCH 039/320] chore: upgrade rusty_v8 to 0.70.0 (#18844) Closes https://github.com/denoland/deno/issues/18369 --- .github/workflows/ci.generate.ts | 4 ++-- .github/workflows/ci.yml | 6 +++--- Cargo.lock | 4 ++-- Cargo.toml | 2 +- cli/tests/testdata/run/heapstats.js.out | 4 ++-- core/runtime.rs | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index b59659e624..41abf17370 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "21-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + "22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "21-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "22-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 501feea016..c70590d61b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '21-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '22-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '21-cargo-target-${{ matrix.os }}-${{ 
matrix.profile }}-${{ matrix.job }}-' + restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -578,7 +578,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '21-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index e2480d54c7..d515dbd72d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5511,9 +5511,9 @@ dependencies = [ [[package]] name = "v8" -version = "0.69.0" +version = "0.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "687e14c2535fe5749098994fd67773962050abe64bcc6a8c92dbf7221b746f49" +checksum = "ab13e022340b67561836bbb90ceeebbfca7e35fbc05471ceff5ce099e5a754a3" dependencies = [ "bitflags 1.3.2", "fslock", diff --git a/Cargo.toml b/Cargo.toml index 0602f2a5f4..3b0a0abf2a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.69.0", default-features = false } +v8 = { version = "0.70.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } deno_core = { version = "0.181.0", path = "./core" } diff --git a/cli/tests/testdata/run/heapstats.js.out b/cli/tests/testdata/run/heapstats.js.out index b75a755f8e..9542663331 100644 --- a/cli/tests/testdata/run/heapstats.js.out +++ b/cli/tests/testdata/run/heapstats.js.out @@ -1,2 +1,2 @@ -Allocated: 4MB -Freed: -4MB +Allocated: 8MB +Freed: -8MB diff --git a/core/runtime.rs b/core/runtime.rs index 923caaea9a..bae6a40db7 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -3165,7 
+3165,7 @@ pub mod tests { #[test] fn test_heap_limits() { let create_params = - v8::Isolate::create_params().heap_limits(0, 3 * 1024 * 1024); + v8::Isolate::create_params().heap_limits(0, 5 * 1024 * 1024); let mut runtime = JsRuntime::new(RuntimeOptions { create_params: Some(create_params), ..Default::default() @@ -3209,7 +3209,7 @@ pub mod tests { #[test] fn test_heap_limit_cb_multiple() { let create_params = - v8::Isolate::create_params().heap_limits(0, 3 * 1024 * 1024); + v8::Isolate::create_params().heap_limits(0, 5 * 1024 * 1024); let mut runtime = JsRuntime::new(RuntimeOptions { create_params: Some(create_params), ..Default::default() From 9d7e3f84c852a538937399e542fe4fecb9d52f08 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 26 Apr 2023 15:37:38 +0530 Subject: [PATCH 040/320] chore(ext/websocket): remove ping frame handling (#18847) Automatically done in the fastwebsockets crate --- ext/websocket/01_websocket.js | 6 +- ext/websocket/02_websocketstream.js | 20 ++--- ext/websocket/lib.rs | 119 +++++++++++----------------- 3 files changed, 53 insertions(+), 92 deletions(-) diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 1b7a45ce0b..cb9f756d21 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -478,7 +478,7 @@ class WebSocket extends EventTarget { this[_serverHandleIdleTimeout](); break; } - case 5: { + case 3: { /* error */ this[_readyState] = CLOSED; @@ -492,10 +492,6 @@ class WebSocket extends EventTarget { core.tryClose(this[_rid]); break; } - case 3: { - /* ping */ - break; - } default: { /* close */ const code = kind; diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index f545d7a99b..8e7100cdb3 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -236,7 +236,7 @@ class WebSocketStream { }, }); const pull = async (controller) => { - const { 0: kind, 1: value } = await core.opAsync( + const { 0: kind, 1: 
value } = await core.opAsync2( "op_ws_next_event", this[_rid], ); @@ -249,7 +249,11 @@ class WebSocketStream { controller.enqueue(value); break; } - case 5: { + case 2: { + /* pong */ + break; + } + case 3: { /* error */ const err = new Error(value); this[_closed].reject(err); @@ -257,17 +261,7 @@ class WebSocketStream { core.tryClose(this[_rid]); break; } - case 3: { - /* ping */ - await core.opAsync("op_ws_send_pong", this[_rid]); - await pull(controller); - break; - } - case 2: { - /* pong */ - break; - } - case 6: { + case 4: { /* closed */ this[_closed].resolve(undefined); core.tryClose(this[_rid]); diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 74898a471f..df4127d273 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -27,7 +27,6 @@ use http::Method; use http::Request; use http::Uri; use hyper::Body; -use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; use std::cell::Cell; @@ -85,15 +84,6 @@ impl Resource for WsCancelResource { } } -#[derive(Deserialize)] -#[serde(tag = "kind", content = "value", rename_all = "camelCase")] -pub enum SendValue { - Text(String), - Binary(ZeroCopyBuf), - Pong, - Ping, -} - // This op is needed because creating a WS instance in JavaScript is a sync // operation and should throw error when permissions are not fulfilled, // but actual op that connects WS is async. 
@@ -301,9 +291,8 @@ pub enum MessageKind { Text = 0, Binary = 1, Pong = 2, - Ping = 3, - Error = 5, - Closed = 6, + Error = 3, + Closed = 4, } pub struct ServerWebSocket { @@ -405,20 +394,6 @@ pub async fn op_ws_send_text( .await } -#[op] -pub async fn op_ws_send_ping( - state: Rc>, - rid: ResourceId, -) -> Result<(), AnyError> { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - resource - .write_frame(Frame::new(true, OpCode::Ping, None, vec![])) - .await -} - #[op] pub async fn op_ws_send_pong( state: Rc>, @@ -463,58 +438,55 @@ pub async fn op_ws_next_event( .get::(rid)?; let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; - let val = match ws.read_frame().await { - Ok(val) => val, - Err(err) => { - // No message was received, socket closed while we waited. - // Try close the stream, ignoring any errors, and report closed status to JavaScript. - if resource.closed.get() { - let _ = state.borrow_mut().resource_table.close(rid); + loop { + let val = match ws.read_frame().await { + Ok(val) => val, + Err(err) => { + // No message was received, socket closed while we waited. + // Try close the stream, ignoring any errors, and report closed status to JavaScript. 
+ if resource.closed.get() { + let _ = state.borrow_mut().resource_table.close(rid); + return Ok(( + MessageKind::Closed as u16, + StringOrBuffer::Buffer(vec![].into()), + )); + } + return Ok(( - MessageKind::Closed as u16, - StringOrBuffer::Buffer(vec![].into()), + MessageKind::Error as u16, + StringOrBuffer::String(err.to_string()), )); } + }; - return Ok(( - MessageKind::Error as u16, - StringOrBuffer::String(err.to_string()), - )); - } - }; + break Ok(match val.opcode { + OpCode::Text => ( + MessageKind::Text as u16, + StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), + ), + OpCode::Binary => ( + MessageKind::Binary as u16, + StringOrBuffer::Buffer(val.payload.into()), + ), + OpCode::Close => { + if val.payload.len() < 2 { + return Ok((1005, StringOrBuffer::String("".to_string()))); + } - let res = match val.opcode { - OpCode::Text => ( - MessageKind::Text as u16, - StringOrBuffer::String(String::from_utf8(val.payload).unwrap()), - ), - OpCode::Binary => ( - MessageKind::Binary as u16, - StringOrBuffer::Buffer(val.payload.into()), - ), - OpCode::Close => { - if val.payload.len() < 2 { - return Ok((1005, StringOrBuffer::String("".to_string()))); + let close_code = + CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); + let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); + (close_code.into(), StringOrBuffer::String(reason)) } - - let close_code = - CloseCode::from(u16::from_be_bytes([val.payload[0], val.payload[1]])); - let reason = String::from_utf8(val.payload[2..].to_vec()).unwrap(); - (close_code.into(), StringOrBuffer::String(reason)) - } - OpCode::Ping => ( - MessageKind::Ping as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Pong => ( - MessageKind::Pong as u16, - StringOrBuffer::Buffer(vec![].into()), - ), - OpCode::Continuation => { - return Err(type_error("Unexpected continuation frame")) - } - }; - Ok(res) + OpCode::Pong => ( + MessageKind::Pong as u16, + 
StringOrBuffer::Buffer(vec![].into()), + ), + OpCode::Continuation | OpCode::Ping => { + continue; + } + }); + } } deno_core::extension!(deno_websocket, @@ -527,7 +499,6 @@ deno_core::extension!(deno_websocket, op_ws_next_event, op_ws_send_binary, op_ws_send_text, - op_ws_send_ping, op_ws_send_pong, op_ws_server_create, ], From 18170f2326ec715f1df93592b71ccb1de5cd40a2 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 26 Apr 2023 15:37:57 +0530 Subject: [PATCH 041/320] perf(ext/http): avoid spread arg deopt in op_http_wait (#18850) 2% improvement on macOS hello world. --- ext/http/00_serve.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 3022bc5fac..6e8f846df9 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -515,7 +515,7 @@ async function serve(arg1, arg2) { const rid = context.serverRid; let req; try { - req = await core.opAsync("op_http_wait", rid); + req = await core.opAsync2("op_http_wait", rid); } catch (error) { if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { break; From fbefceeb56ebde276fe2fe9e5bcb7ebbcdc9ab22 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 26 Apr 2023 15:38:13 +0530 Subject: [PATCH 042/320] perf(ext/http): use smi for slab IDs (#18848) --- ext/http/http_next.rs | 80 ++++++++++++++++++++----------------------- 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 71f2a32b68..1c2a232e20 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -141,11 +141,11 @@ macro_rules! with { ($ref:ident, $mut:ident, $type:ty, $http:ident, $expr:expr) => { #[inline(always)] #[allow(dead_code)] - pub(crate) fn $mut(key: usize, f: impl FnOnce(&mut $type) -> T) -> T { + pub(crate) fn $mut(key: u32, f: impl FnOnce(&mut $type) -> T) -> T { SLAB.with(|slab| { let mut borrow = slab.borrow_mut(); #[allow(unused_mut)] // TODO(mmastrac): compiler issue? 
- let mut $http = match borrow.get_mut(key) { + let mut $http = match borrow.get_mut(key as usize) { Some(http) => http, None => panic!( "Attemped to access invalid request {} ({} in total available)", @@ -163,10 +163,10 @@ macro_rules! with { #[inline(always)] #[allow(dead_code)] - pub(crate) fn $ref(key: usize, f: impl FnOnce(&$type) -> T) -> T { + pub(crate) fn $ref(key: u32, f: impl FnOnce(&$type) -> T) -> T { SLAB.with(|slab| { let borrow = slab.borrow(); - let $http = borrow.get(key).unwrap(); + let $http = borrow.get(key as usize).unwrap(); #[cfg(__zombie_http_tracking)] if !$http.alive { panic!("Attempted to access a dead HTTP object") @@ -211,7 +211,7 @@ with!(with_http, with_http_mut, HttpSlabRecord, http, http); fn slab_insert( request: Request, request_info: HttpConnectionProperties, -) -> usize { +) -> u32 { SLAB.with(|slab| { let (request_parts, request_body) = request.into_parts(); slab.borrow_mut().insert(HttpSlabRecord { @@ -224,7 +224,7 @@ fn slab_insert( #[cfg(__zombie_http_tracking)] alive: true, }) - }) + }) as u32 } #[op] @@ -233,7 +233,7 @@ pub fn op_upgrade_raw(_index: usize) {} #[op] pub async fn op_upgrade( state: Rc>, - index: usize, + index: u32, headers: Vec<(ByteString, ByteString)>, ) -> Result<(ResourceId, ZeroCopyBuf), AnyError> { // Stage 1: set the respnse to 101 Switching Protocols and send it @@ -273,8 +273,8 @@ pub async fn op_upgrade( )) } -#[op] -pub fn op_set_promise_complete(index: usize, status: u16) { +#[op(fast)] +pub fn op_set_promise_complete(index: u32, status: u16) { with_resp_mut(index, |resp| { // The Javascript code will never provide a status that is invalid here (see 23_response.js) *resp.as_mut().unwrap().status_mut() = @@ -287,7 +287,7 @@ pub fn op_set_promise_complete(index: usize, status: u16) { #[op] pub fn op_get_request_method_and_url( - index: usize, + index: u32, ) -> (String, Option, String, String, Option) { // TODO(mmastrac): Passing method can be optimized with_http(index, |http| { @@ -314,7 +314,7 
@@ pub fn op_get_request_method_and_url( } #[op] -pub fn op_get_request_header(index: usize, name: String) -> Option { +pub fn op_get_request_header(index: u32, name: String) -> Option { with_req(index, |req| { let value = req.headers.get(name); value.map(|value| value.as_bytes().into()) @@ -322,7 +322,7 @@ pub fn op_get_request_header(index: usize, name: String) -> Option { } #[op] -pub fn op_get_request_headers(index: usize) -> Vec<(ByteString, ByteString)> { +pub fn op_get_request_headers(index: u32) -> Vec<(ByteString, ByteString)> { with_req(index, |req| { let headers = &req.headers; let mut vec = Vec::with_capacity(headers.len()); @@ -356,8 +356,8 @@ pub fn op_get_request_headers(index: usize) -> Vec<(ByteString, ByteString)> { }) } -#[op] -pub fn op_read_request_body(state: &mut OpState, index: usize) -> ResourceId { +#[op(fast)] +pub fn op_read_request_body(state: &mut OpState, index: u32) -> ResourceId { let incoming = with_req_body_mut(index, |body| body.take().unwrap()); let body_resource = Rc::new(HttpRequestBody::new(incoming)); let res = state.resource_table.add_rc(body_resource.clone()); @@ -367,24 +367,20 @@ pub fn op_read_request_body(state: &mut OpState, index: usize) -> ResourceId { res } -#[op] -pub fn op_set_response_header( - index: usize, - name: ByteString, - value: ByteString, -) { +#[op(fast)] +pub fn op_set_response_header(index: u32, name: &str, value: &str) { with_resp_mut(index, |resp| { let resp_headers = resp.as_mut().unwrap().headers_mut(); // These are valid latin-1 strings - let name = HeaderName::from_bytes(&name).unwrap(); - let value = HeaderValue::from_bytes(&value).unwrap(); + let name = HeaderName::from_bytes(name.as_bytes()).unwrap(); + let value = HeaderValue::from_bytes(value.as_bytes()).unwrap(); resp_headers.append(name, value); }); } #[op] pub fn op_set_response_headers( - index: usize, + index: u32, headers: Vec<(ByteString, ByteString)>, ) { // TODO(mmastrac): Invalid headers should be handled? 
@@ -400,10 +396,10 @@ pub fn op_set_response_headers( }) } -#[op] +#[op(fast)] pub fn op_set_response_body_resource( state: &mut OpState, - index: usize, + index: u32, stream_rid: ResourceId, auto_close: bool, ) -> Result<(), AnyError> { @@ -426,10 +422,10 @@ pub fn op_set_response_body_resource( Ok(()) } -#[op] +#[op(fast)] pub fn op_set_response_body_stream( state: &mut OpState, - index: usize, + index: u32, ) -> Result { // TODO(mmastrac): what should this channel size be? let (tx, rx) = tokio::sync::mpsc::channel(1); @@ -445,8 +441,8 @@ pub fn op_set_response_body_stream( Ok(state.resource_table.add(tx)) } -#[op] -pub fn op_set_response_body_text(index: usize, text: String) { +#[op(fast)] +pub fn op_set_response_body_text(index: u32, text: String) { if !text.is_empty() { with_resp_mut(index, move |response| { response @@ -458,15 +454,15 @@ pub fn op_set_response_body_text(index: usize, text: String) { } } -#[op] -pub fn op_set_response_body_bytes(index: usize, buffer: ZeroCopyBuf) { +#[op(fast)] +pub fn op_set_response_body_bytes(index: u32, buffer: &[u8]) { if !buffer.is_empty() { with_resp_mut(index, |response| { response .as_mut() .unwrap() .body_mut() - .initialize(ResponseBytesInner::Bytes(BufView::from(buffer))) + .initialize(ResponseBytesInner::Bytes(BufView::from(buffer.to_vec()))) }); }; } @@ -474,7 +470,7 @@ pub fn op_set_response_body_bytes(index: usize, buffer: ZeroCopyBuf) { #[op] pub async fn op_http_track( state: Rc>, - index: usize, + index: u32, server_rid: ResourceId, ) -> Result<(), AnyError> { let handle = with_resp(index, |resp| { @@ -496,12 +492,12 @@ pub async fn op_http_track( } #[pin_project(PinnedDrop)] -pub struct SlabFuture>(usize, #[pin] F); +pub struct SlabFuture>(u32, #[pin] F); pub fn new_slab_future( request: Request, request_info: HttpConnectionProperties, - tx: tokio::sync::mpsc::Sender, + tx: tokio::sync::mpsc::Sender, ) -> SlabFuture> { let index = slab_insert(request, request_info); let rx = with_promise(index, |promise| 
promise.clone()); @@ -521,11 +517,11 @@ impl> PinnedDrop for SlabFuture { SLAB.with(|slab| { #[cfg(__zombie_http_tracking)] { - slab.borrow_mut().get_mut(self.0).unwrap().alive = false; + slab.borrow_mut().get_mut(self.0 as usize).unwrap().alive = false; } #[cfg(not(__zombie_http_tracking))] { - slab.borrow_mut().remove(self.0); + slab.borrow_mut().remove(self.0 as usize); } }); } @@ -589,7 +585,7 @@ fn serve_https( mut io: TlsStream, request_info: HttpConnectionProperties, cancel: RcRef, - tx: tokio::sync::mpsc::Sender, + tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { @@ -614,7 +610,7 @@ fn serve_http( io: impl HttpServeStream, request_info: HttpConnectionProperties, cancel: RcRef, - tx: tokio::sync::mpsc::Sender, + tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { @@ -627,7 +623,7 @@ fn serve_http_on( network_stream: NetworkStream, listen_properties: &HttpListenProperties, cancel: RcRef, - tx: tokio::sync::mpsc::Sender, + tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // We always want some sort of peer address. If we can't get one, just make up one. let peer_address = network_stream.peer_address().unwrap_or_else(|_| { @@ -659,7 +655,7 @@ fn serve_http_on( struct HttpJoinHandle( AsyncRefCell>>>, CancelHandle, - AsyncRefCell>, + AsyncRefCell>, ); impl HttpJoinHandle { @@ -798,7 +794,7 @@ pub async fn op_http_wait( // Do we have a request? 
if let Some(req) = next { - return Ok(req as u32); + return Ok(req); } // No - we're shutting down From 036778c2e8e159ef1e586de4102f823367b7c554 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 26 Apr 2023 17:55:48 +0530 Subject: [PATCH 043/320] chore(ext/websocket): Add autobahn|testsuite fuzzingclient (#18846) Closes #17242 --- .dprint.json | 7 +++-- .github/workflows/ci.generate.ts | 9 +++++++ .github/workflows/ci.yml | 5 ++++ .gitignore | 2 ++ ext/websocket/autobahn/autobahn_server.js | 20 ++++++++++++++ ext/websocket/autobahn/fuzzingclient.js | 33 +++++++++++++++++++++++ ext/websocket/autobahn/fuzzingclient.json | 26 ++++++++++++++++++ 7 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 ext/websocket/autobahn/autobahn_server.js create mode 100644 ext/websocket/autobahn/fuzzingclient.js create mode 100644 ext/websocket/autobahn/fuzzingclient.json diff --git a/.dprint.json b/.dprint.json index d20b1673ba..51a52d8812 100644 --- a/.dprint.json +++ b/.dprint.json @@ -13,7 +13,9 @@ "associations": "**/*.rs", "rustfmt": "rustfmt --config imports_granularity=item" }, - "includes": ["**/*.{ts,tsx,js,jsx,json,md,toml,rs}"], + "includes": [ + "**/*.{ts,tsx,js,jsx,json,md,toml,rs}" + ], "excludes": [ ".cargo_home", ".git", @@ -48,7 +50,8 @@ "tools/node_compat/TODO.md", "tools/node_compat/versions", "tools/wpt/expectation.json", - "tools/wpt/manifest.json" + "tools/wpt/manifest.json", + "ext/websocket/autobahn/reports" ], "plugins": [ "https://plugins.dprint.dev/typescript-0.84.0.wasm", diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 41abf17370..af212cd472 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -642,6 +642,15 @@ const ci = { run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/', }, + { + name: "Autobahn testsuite", + if: [ + "matrix.job == 'test' && matrix.profile == 'release' &&", + 
"!startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')", + ].join("\n"), + run: + "target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js", + }, { name: "Test debug", if: [ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c70590d61b..dc9c709e95 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -399,6 +399,11 @@ jobs: env: CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe' run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/' + - name: Autobahn testsuite + if: |- + !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' && + !startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')) + run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js - name: Test debug if: |- !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' && diff --git a/.gitignore b/.gitignore index 6f806b1433..a8738ea41d 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,5 @@ gclient_config.py_entries # WPT generated cert files /tools/wpt/certs/index.txt* /tools/wpt/certs/serial* + +/ext/websocket/autobahn/reports diff --git a/ext/websocket/autobahn/autobahn_server.js b/ext/websocket/autobahn/autobahn_server.js new file mode 100644 index 0000000000..c678dfc1a6 --- /dev/null +++ b/ext/websocket/autobahn/autobahn_server.js @@ -0,0 +1,20 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { parse } from "../../../test_util/std/flags/mod.ts"; + +const { port } = parse(Deno.args, { + number: ["port"], + default: { + port: 6969, + }, +}); + +const { serve } = Deno; + +// A message-based WebSocket echo server. 
+serve((request) => { + const { socket, response } = Deno.upgradeWebSocket(request); + socket.onmessage = (event) => { + socket.send(event.data); + }; + return response; +}, { port }); diff --git a/ext/websocket/autobahn/fuzzingclient.js b/ext/websocket/autobahn/fuzzingclient.js new file mode 100644 index 0000000000..8aa7166958 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.js @@ -0,0 +1,33 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file + +import { $ } from "https://deno.land/x/dax@0.31.0/mod.ts"; + +const pwd = new URL(".", import.meta.url).pathname; + +const AUTOBAHN_TESTSUITE_DOCKER = + "crossbario/autobahn-testsuite:0.8.2@sha256:5d4ba3aa7d6ab2fdbf6606f3f4ecbe4b66f205ce1cbc176d6cdf650157e52242"; + +const self = Deno.execPath(); +$`${self} run -A --unstable ${pwd}/autobahn_server.js`.spawn(); +await $`docker run --name fuzzingserver -v ${pwd}/fuzzingclient.json:/fuzzingclient.json:ro -v ${pwd}/reports:/reports -p 9001:9001 --net=host --rm ${AUTOBAHN_TESTSUITE_DOCKER} wstest -m fuzzingclient -s fuzzingclient.json` + .cwd(pwd); + +const { deno_websocket } = JSON.parse( + Deno.readTextFileSync(`${pwd}/reports/servers/index.json`), +); +const result = Object.values(deno_websocket); + +function failed(name) { + return name != "OK" && name != "INFORMATIONAL" && name != "NON-STRICT"; +} + +const failedtests = result.filter((outcome) => failed(outcome.behavior)); + +console.log( + `%c${result.length - failedtests.length} / ${result.length} tests OK`, + `color: ${failedtests.length == 0 ? "green" : "red"}`, +); + +Deno.exit(failedtests.length == 0 ? 
0 : 1); diff --git a/ext/websocket/autobahn/fuzzingclient.json b/ext/websocket/autobahn/fuzzingclient.json new file mode 100644 index 0000000000..fcee80c993 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.json @@ -0,0 +1,26 @@ +{ + "outdir": "./reports/servers", + "servers": [ + { + "agent": "deno_websocket", + "url": "ws://localhost:6969" + } + ], + "cases": [ + "1.*", + "2.*", + "3.*", + "4.*", + "5.*", + "6.*", + "7.*", + "9.*", + "10.*" + ], + "exclude-cases": [ + "11.*", + "12.*", + "13.*" + ], + "exclude-agent-cases": {} +} From 1b450015e7046bff952b2841e73074d432006272 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 14:54:03 +0200 Subject: [PATCH 044/320] BREAKING(unstable): remove "Deno.serve(handler, options)" overload (#18759) In preparation to stabilization of the API this overload was decided to be removed. --- cli/bench/http/deno_flash_hono_router.js | 2 +- cli/tests/unit/serve_test.ts | 59 +++++++++++++----------- cli/tests/unit_node/async_hooks_test.ts | 8 ++-- cli/tsc/dts/lib.deno.unstable.d.ts | 5 +- ext/http/00_serve.js | 1 - 5 files changed, 37 insertions(+), 38 deletions(-) diff --git a/cli/bench/http/deno_flash_hono_router.js b/cli/bench/http/deno_flash_hono_router.js index ef78849b25..062c37cbff 100644 --- a/cli/bench/http/deno_flash_hono_router.js +++ b/cli/bench/http/deno_flash_hono_router.js @@ -7,4 +7,4 @@ const [hostname, port] = addr.split(":"); const app = new Hono(); app.get("/", (c) => c.text("Hello, World!")); -Deno.serve(app.fetch, { port: Number(port), hostname }); +Deno.serve({ port: Number(port), hostname }, app.fetch); diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 55b7c4590a..2bdfbfe3cd 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -248,7 +248,12 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload2() { const promise = deferred(); const listeningPromise = deferred(); - const server = 
Deno.serve(async (request) => { + const server = Deno.serve({ + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }, async (request) => { // FIXME(bartlomieju): // make sure that request can be inspected console.log(request); @@ -256,11 +261,6 @@ Deno.test({ permissions: { net: true } }, async function httpServerOverload2() { assertEquals(await request.text(), ""); promise.resolve(); return new Response("Hello World", { headers: { "foo": "bar" } }); - }, { - port: 4501, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), }); await listeningPromise; @@ -1015,12 +1015,15 @@ Deno.test( const promise = deferred(); const ac = new AbortController(); - const server = Deno.serve((request) => { - assert(request.body); + const server = Deno.serve( + { port: 2333, signal: ac.signal }, + (request) => { + assert(request.body); - promise.resolve(); - return new Response(request.body); - }, { port: 2333, signal: ac.signal }); + promise.resolve(); + return new Response(request.body); + }, + ); const ts = new TransformStream(); const writable = ts.writable.getWriter(); @@ -2484,10 +2487,7 @@ Deno.test( const ac = new AbortController(); const promise = deferred(); let count = 0; - const server = Deno.serve(() => { - count++; - return new Response(`hello world ${count}`); - }, { + const server = Deno.serve({ async onListen({ port }: { port: number }) { const res1 = await fetch(`http://localhost:${port}/`); assertEquals(await res1.text(), "hello world 1"); @@ -2499,6 +2499,9 @@ Deno.test( ac.abort(); }, signal: ac.signal, + }, () => { + count++; + return new Response(`hello world ${count}`); }); await promise; @@ -2552,7 +2555,16 @@ Deno.test( async function testIssue16567() { const ac = new AbortController(); const promise = deferred(); - const server = Deno.serve(() => + const server = Deno.serve({ + async onListen({ port }) { + const res1 = await fetch(`http://localhost:${port}/`); + 
assertEquals((await res1.text()).length, 40 * 50_000); + + promise.resolve(); + ac.abort(); + }, + signal: ac.signal, + }, () => new Response( new ReadableStream({ start(c) { @@ -2563,16 +2575,7 @@ Deno.test( c.close(); }, }), - ), { - async onListen({ port }) { - const res1 = await fetch(`http://localhost:${port}/`); - assertEquals((await res1.text()).length, 40 * 50_000); - - promise.resolve(); - ac.abort(); - }, - signal: ac.signal, - }); + )); await promise; await server; @@ -2716,8 +2719,8 @@ Deno.test( async function httpServeCurlH2C() { const ac = new AbortController(); const server = Deno.serve( - () => new Response("hello world!"), { signal: ac.signal }, + () => new Response("hello world!"), ); assertEquals( @@ -2747,12 +2750,12 @@ Deno.test( async function httpsServeCurlH2C() { const ac = new AbortController(); const server = Deno.serve( - () => new Response("hello world!"), { signal: ac.signal, cert: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.crt"), key: Deno.readTextFileSync("cli/tests/testdata/tls/localhost.key"), }, + () => new Response("hello world!"), ); assertEquals( diff --git a/cli/tests/unit_node/async_hooks_test.ts b/cli/tests/unit_node/async_hooks_test.ts index 73d6a99bc8..4062443151 100644 --- a/cli/tests/unit_node/async_hooks_test.ts +++ b/cli/tests/unit_node/async_hooks_test.ts @@ -41,7 +41,10 @@ Deno.test(async function bar() { let differentScopeDone = false; const als = new AsyncLocalStorage(); const ac = new AbortController(); - const server = Deno.serve(() => { + const server = Deno.serve({ + signal: ac.signal, + port: 4000, + }, () => { const differentScope = als.run(123, () => AsyncResource.bind(() => { differentScopeDone = true; @@ -54,9 +57,6 @@ Deno.test(async function bar() { await new Promise((res) => setTimeout(res, 10)); return new Response(als.getStore() as string); // "Hello World" }); - }, { - signal: ac.signal, - port: 4000, }); const res = await fetch("http://localhost:4000"); diff --git 
a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index c11bfb09e7..cf6cedf414 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1361,10 +1361,7 @@ declare namespace Deno { * * @category HTTP Server */ - export function serve( - handler: ServeHandler, - options?: ServeOptions | ServeTlsOptions, - ): Promise; + export function serve(handler: ServeHandler): Promise; /** **UNSTABLE**: New API, yet to be vetted. * * Serves HTTP requests with the given handler. diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 6e8f846df9..1efa4cddbc 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -425,7 +425,6 @@ async function serve(arg1, arg2) { let handler = undefined; if (typeof arg1 === "function") { handler = arg1; - options = arg2; } else if (typeof arg2 === "function") { handler = arg2; options = arg1; From 38681dfa88defd1f9f3abd74cbb6c83cff597d84 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Wed, 26 Apr 2023 15:33:30 +0200 Subject: [PATCH 045/320] perf(ext/http): optimize for zero or one-packet response streams (#18834) Improve `deno_reactdom_ssr_flash.jsx` by optimizing for zero/one-packet response streams. 
--- cli/tests/unit/serve_test.ts | 148 ++++++++++++++--------------------- ext/http/00_serve.js | 88 +++++++++++++++++++-- 2 files changed, 141 insertions(+), 95 deletions(-) diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 2bdfbfe3cd..6158f587e6 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -532,21 +532,43 @@ Deno.test( }, ); -Deno.test( - { permissions: { net: true } }, - async function httpServerStreamResponse() { - const stream = new TransformStream(); - const writer = stream.writable.getWriter(); - writer.write(new TextEncoder().encode("hello ")); - writer.write(new TextEncoder().encode("world")); - writer.close(); +function createStreamTest(count: number, delay: number, action: string) { + function doAction(controller: ReadableStreamDefaultController, i: number) { + if (i == count) { + if (action == "Throw") { + controller.error(new Error("Expected error!")); + } else { + controller.close(); + } + } else { + controller.enqueue(`a${i}`); - const listeningPromise = deferred(); + if (delay == 0) { + doAction(controller, i + 1); + } else { + setTimeout(() => doAction(controller, i + 1), delay); + } + } + } + + function makeStream(count: number, delay: number): ReadableStream { + return new ReadableStream({ + start(controller) { + if (delay == 0) { + doAction(controller, 0); + } else { + setTimeout(() => doAction(controller, 0), delay); + } + }, + }).pipeThrough(new TextEncoderStream()); + } + + Deno.test(`httpServerStreamCount${count}Delay${delay}${action}`, async () => { const ac = new AbortController(); + const listeningPromise = deferred(); const server = Deno.serve({ - handler: (request) => { - assert(!request.body); - return new Response(stream.readable); + handler: async (request) => { + return new Response(makeStream(count, delay)); }, port: 4501, signal: ac.signal, @@ -556,12 +578,34 @@ Deno.test( await listeningPromise; const resp = await fetch("http://127.0.0.1:4501/"); - const respBody = 
await resp.text(); - assertEquals("hello world", respBody); + const text = await resp.text(); + ac.abort(); await server; - }, -); + let expected = ""; + if (action == "Throw" && count < 2 && delay < 1000) { + // NOTE: This is specific to the current implementation. In some cases where a stream errors, we + // don't send the first packet. + expected = ""; + } else { + for (let i = 0; i < count; i++) { + expected += `a${i}`; + } + } + + assertEquals(text, expected); + }); +} + +for (let count of [0, 1, 2, 3]) { + for (let delay of [0, 1, 1000]) { + // Creating a stream that errors in start will throw + if (delay > 0) { + createStreamTest(count, delay, "Throw"); + } + createStreamTest(count, delay, "Close"); + } +} Deno.test( { permissions: { net: true } }, @@ -1690,78 +1734,6 @@ createServerLengthTest("autoResponseWithUnknownLengthEmpty", { expects_con_len: false, }); -Deno.test( - { permissions: { net: true } }, - async function httpServerGetChunkedResponseWithKa() { - const promises = [deferred(), deferred()]; - let reqCount = 0; - const listeningPromise = deferred(); - const ac = new AbortController(); - - const server = Deno.serve({ - handler: async (request) => { - assertEquals(request.method, "GET"); - promises[reqCount].resolve(); - reqCount++; - return new Response(reqCount <= 1 ? 
stream("foo bar baz") : "zar quux"); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); - - await listeningPromise; - const conn = await Deno.connect({ port: 4503 }); - const encoder = new TextEncoder(); - { - const body = - `GET / HTTP/1.1\r\nHost: example.domain\r\nConnection: keep-alive\r\n\r\n`; - const writeResult = await conn.write(encoder.encode(body)); - assertEquals(body.length, writeResult); - await promises[0]; - } - - const decoder = new TextDecoder(); - { - let msg = ""; - while (true) { - try { - const buf = new Uint8Array(1024); - const readResult = await conn.read(buf); - assert(readResult); - msg += decoder.decode(buf.subarray(0, readResult)); - assert(msg.endsWith("\r\nfoo bar baz\r\n0\r\n\r\n")); - break; - } catch { - continue; - } - } - } - - // once more! - { - const body = - `GET /quux HTTP/1.1\r\nHost: example.domain\r\nConnection: close\r\n\r\n`; - const writeResult = await conn.write(encoder.encode(body)); - assertEquals(body.length, writeResult); - await promises[1]; - } - { - const buf = new Uint8Array(1024); - const readResult = await conn.read(buf); - assert(readResult); - const msg = decoder.decode(buf.subarray(0, readResult)); - assert(msg.endsWith("zar quux")); - } - - conn.close(); - - ac.abort(); - await server; - }, -); - Deno.test( { permissions: { net: true } }, async function httpServerPostWithContentLengthBody() { diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 1efa4cddbc..56f250d1db 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -28,6 +28,7 @@ import { import { Deferred, getReadableStreamResourceBacking, + readableStreamClose, readableStreamForRid, ReadableStreamPrototype, } from "ext:deno_web/06_streams.js"; @@ -331,24 +332,97 @@ function fastSyncResponseOrStream(req, respBody) { } async function asyncResponse(responseBodies, req, status, stream) { - const responseRid = core.ops.op_set_response_body_stream(req); - 
SetPrototypeAdd(responseBodies, responseRid); const reader = stream.getReader(); - core.ops.op_set_promise_complete(req, status); + let responseRid; + let closed = false; + let timeout; + try { + // IMPORTANT: We get a performance boost from this optimization, but V8 is very + // sensitive to the order and structure. Benchmark any changes to this code. + + // Optimize for streams that are done in zero or one packets. We will not + // have to allocate a resource in this case. + const { value: value1, done: done1 } = await reader.read(); + if (done1) { + closed = true; + // Exit 1: no response body at all, extreme fast path + // Reader will be closed by finally block + return; + } + + // The second value cannot block indefinitely, as someone may be waiting on a response + // of the first packet that may influence this packet. We set this timeout arbitrarily to 250ms + // and we race it. + let timeoutPromise; + timeout = setTimeout(() => { + responseRid = core.ops.op_set_response_body_stream(req); + SetPrototypeAdd(responseBodies, responseRid); + core.ops.op_set_promise_complete(req, status); + timeoutPromise = core.writeAll(responseRid, value1); + }, 250); + const { value: value2, done: done2 } = await reader.read(); + + if (timeoutPromise) { + await timeoutPromise; + if (done2) { + closed = true; + // Exit 2(a): read 2 is EOS, and timeout resolved. + // Reader will be closed by finally block + // Response stream will be closed by finally block. + return; + } + + // Timeout resolved, value1 written but read2 is not EOS. Carry value2 forward. + } else { + clearTimeout(timeout); + timeout = undefined; + + if (done2) { + // Exit 2(b): read 2 is EOS, and timeout did not resolve as we read fast enough. 
+ // Reader will be closed by finally block + // No response stream + closed = true; + core.ops.op_set_response_body_bytes(req, value1); + return; + } + + responseRid = core.ops.op_set_response_body_stream(req); + SetPrototypeAdd(responseBodies, responseRid); + core.ops.op_set_promise_complete(req, status); + // Write our first packet + await core.writeAll(responseRid, value1); + } + + await core.writeAll(responseRid, value2); while (true) { const { value, done } = await reader.read(); if (done) { + closed = true; break; } await core.writeAll(responseRid, value); } } catch (error) { - await reader.cancel(error); + closed = true; + try { + await reader.cancel(error); + } catch { + // Pass + } } finally { - core.tryClose(responseRid); - SetPrototypeDelete(responseBodies, responseRid); - reader.releaseLock(); + if (!closed) { + readableStreamClose(reader); + } + if (timeout !== undefined) { + clearTimeout(timeout); + } + if (responseRid) { + core.tryClose(responseRid); + SetPrototypeDelete(responseBodies, responseRid); + } else { + core.ops.op_set_promise_complete(req, status); + } } } From 17d1c7e444542f43229a047853605ac22081abdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 17:48:23 +0200 Subject: [PATCH 046/320] =?UTF-8?q?Revert=20"chore(ext/websocket):=20Add?= =?UTF-8?q?=20autobahn|testsuite=20fuzzingclient=20(#=E2=80=A6=20(#18856)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …18846)" This reverts commit 036778c2e8e159ef1e586de4102f823367b7c554. Keeps failing on `main` branch. 
--- .dprint.json | 7 ++--- .github/workflows/ci.generate.ts | 9 ------- .github/workflows/ci.yml | 5 ---- .gitignore | 2 -- ext/websocket/autobahn/autobahn_server.js | 20 -------------- ext/websocket/autobahn/fuzzingclient.js | 33 ----------------------- ext/websocket/autobahn/fuzzingclient.json | 26 ------------------ 7 files changed, 2 insertions(+), 100 deletions(-) delete mode 100644 ext/websocket/autobahn/autobahn_server.js delete mode 100644 ext/websocket/autobahn/fuzzingclient.js delete mode 100644 ext/websocket/autobahn/fuzzingclient.json diff --git a/.dprint.json b/.dprint.json index 51a52d8812..d20b1673ba 100644 --- a/.dprint.json +++ b/.dprint.json @@ -13,9 +13,7 @@ "associations": "**/*.rs", "rustfmt": "rustfmt --config imports_granularity=item" }, - "includes": [ - "**/*.{ts,tsx,js,jsx,json,md,toml,rs}" - ], + "includes": ["**/*.{ts,tsx,js,jsx,json,md,toml,rs}"], "excludes": [ ".cargo_home", ".git", @@ -50,8 +48,7 @@ "tools/node_compat/TODO.md", "tools/node_compat/versions", "tools/wpt/expectation.json", - "tools/wpt/manifest.json", - "ext/websocket/autobahn/reports" + "tools/wpt/manifest.json" ], "plugins": [ "https://plugins.dprint.dev/typescript-0.84.0.wasm", diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index af212cd472..41abf17370 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -642,15 +642,6 @@ const ci = { run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/', }, - { - name: "Autobahn testsuite", - if: [ - "matrix.job == 'test' && matrix.profile == 'release' &&", - "!startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')", - ].join("\n"), - run: - "target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js", - }, { name: "Test debug", if: [ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc9c709e95..c70590d61b 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -399,11 +399,6 @@ jobs: env: CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe' run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/' - - name: Autobahn testsuite - if: |- - !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' && - !startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')) - run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js - name: Test debug if: |- !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' && diff --git a/.gitignore b/.gitignore index a8738ea41d..6f806b1433 100644 --- a/.gitignore +++ b/.gitignore @@ -26,5 +26,3 @@ gclient_config.py_entries # WPT generated cert files /tools/wpt/certs/index.txt* /tools/wpt/certs/serial* - -/ext/websocket/autobahn/reports diff --git a/ext/websocket/autobahn/autobahn_server.js b/ext/websocket/autobahn/autobahn_server.js deleted file mode 100644 index c678dfc1a6..0000000000 --- a/ext/websocket/autobahn/autobahn_server.js +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -import { parse } from "../../../test_util/std/flags/mod.ts"; - -const { port } = parse(Deno.args, { - number: ["port"], - default: { - port: 6969, - }, -}); - -const { serve } = Deno; - -// A message-based WebSocket echo server. -serve((request) => { - const { socket, response } = Deno.upgradeWebSocket(request); - socket.onmessage = (event) => { - socket.send(event.data); - }; - return response; -}, { port }); diff --git a/ext/websocket/autobahn/fuzzingclient.js b/ext/websocket/autobahn/fuzzingclient.js deleted file mode 100644 index 8aa7166958..0000000000 --- a/ext/websocket/autobahn/fuzzingclient.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. - -// deno-lint-ignore-file - -import { $ } from "https://deno.land/x/dax@0.31.0/mod.ts"; - -const pwd = new URL(".", import.meta.url).pathname; - -const AUTOBAHN_TESTSUITE_DOCKER = - "crossbario/autobahn-testsuite:0.8.2@sha256:5d4ba3aa7d6ab2fdbf6606f3f4ecbe4b66f205ce1cbc176d6cdf650157e52242"; - -const self = Deno.execPath(); -$`${self} run -A --unstable ${pwd}/autobahn_server.js`.spawn(); -await $`docker run --name fuzzingserver -v ${pwd}/fuzzingclient.json:/fuzzingclient.json:ro -v ${pwd}/reports:/reports -p 9001:9001 --net=host --rm ${AUTOBAHN_TESTSUITE_DOCKER} wstest -m fuzzingclient -s fuzzingclient.json` - .cwd(pwd); - -const { deno_websocket } = JSON.parse( - Deno.readTextFileSync(`${pwd}/reports/servers/index.json`), -); -const result = Object.values(deno_websocket); - -function failed(name) { - return name != "OK" && name != "INFORMATIONAL" && name != "NON-STRICT"; -} - -const failedtests = result.filter((outcome) => failed(outcome.behavior)); - -console.log( - `%c${result.length - failedtests.length} / ${result.length} tests OK`, - `color: ${failedtests.length == 0 ? "green" : "red"}`, -); - -Deno.exit(failedtests.length == 0 ? 
0 : 1); diff --git a/ext/websocket/autobahn/fuzzingclient.json b/ext/websocket/autobahn/fuzzingclient.json deleted file mode 100644 index fcee80c993..0000000000 --- a/ext/websocket/autobahn/fuzzingclient.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "outdir": "./reports/servers", - "servers": [ - { - "agent": "deno_websocket", - "url": "ws://localhost:6969" - } - ], - "cases": [ - "1.*", - "2.*", - "3.*", - "4.*", - "5.*", - "6.*", - "7.*", - "9.*", - "10.*" - ], - "exclude-cases": [ - "11.*", - "12.*", - "13.*" - ], - "exclude-agent-cases": {} -} From 5f7db93d0b883abaeae392e5bd8ea5b48e9fe4b5 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Wed, 26 Apr 2023 22:11:54 +0530 Subject: [PATCH 047/320] perf(ext/http): optimize away code based on callback length (#18849) hello world on macOS: ``` divy@mini ~> wrk -d 10s --latency http://127.0.0.1:4500 Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 80.82us 42.95us 2.91ms 96.40% Req/Sec 56.91k 1.94k 60.77k 95.54% Latency Distribution 50% 77.00us 75% 89.00us 90% 105.00us 99% 146.00us 1143455 requests in 10.10s, 138.49MB read Requests/sec: 113212.38 Transfer/sec: 13.71MB divy@mini ~> wrk -d 10s --latency http://127.0.0.1:4500 Running 10s test @ http://127.0.0.1:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 88.63us 78.77us 2.55ms 98.72% Req/Sec 54.84k 2.16k 57.35k 98.51% Latency Distribution 50% 80.00us 75% 93.00us 90% 109.00us 99% 249.00us 1102313 requests in 10.10s, 133.51MB read Requests/sec: 109136.61 Transfer/sec: 13.22MB ``` Expected to have a larger impact on Linux --- ext/http/00_serve.js | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 56f250d1db..8518e8d621 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -435,16 +435,26 @@ async function asyncResponse(responseBodies, req, status, stream) { */ function 
mapToCallback(responseBodies, context, signal, callback, onError) { return async function (req) { - const innerRequest = new InnerRequest(req, context); - const request = fromInnerRequest(innerRequest, signal, "immutable"); - // Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback // 500 error. + let innerRequest; let response; try { - response = await callback(request, { - remoteAddr: innerRequest.remoteAddr, - }); + if (callback.length > 0) { + innerRequest = new InnerRequest(req, context); + const request = fromInnerRequest(innerRequest, signal, "immutable"); + if (callback.length === 1) { + response = await callback(request); + } else { + response = await callback(request, { + get remoteAddr() { + return innerRequest.remoteAddr; + }, + }); + } + } else { + response = await callback(); + } } catch (error) { try { response = await onError(error); @@ -455,19 +465,19 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { } const inner = toInnerResponse(response); - if (innerRequest[_upgraded]) { + if (innerRequest?.[_upgraded]) { // We're done here as the connection has been upgraded during the callback and no longer requires servicing. if (response !== UPGRADE_RESPONSE_SENTINEL) { console.error("Upgrade response was not returned from callback"); context.close(); } - innerRequest[_upgraded](); + innerRequest?.[_upgraded](); return; } // Did everything shut down while we were waiting? 
if (context.closed) { - innerRequest.close(); + innerRequest?.close(); return; } @@ -490,7 +500,7 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { core.ops.op_set_promise_complete(req, status); } - innerRequest.close(); + innerRequest?.close(); }; } From 55a9977c6252a38fac721ad789df0c7e8acf33c9 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 26 Apr 2023 13:07:15 -0400 Subject: [PATCH 048/320] refactor(compile): remove usage of ProcState and CliOptions (#18855) --- cli/args/flags.rs | 5 +- cli/lsp/language_server.rs | 4 +- cli/npm/cache.rs | 12 +-- cli/proc_state.rs | 12 +-- cli/standalone/mod.rs | 194 +++++++++++++++++++++++++------------ cli/tools/bench.rs | 5 +- 6 files changed, 146 insertions(+), 86 deletions(-) diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 7d5c59fd77..0efaa5ea3d 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -2230,7 +2230,7 @@ fn check_arg(checks_local_by_default: bool) -> Arg { default, so adding --check is redundant. If the value of '--check=all' is supplied, diagnostic errors from remote modules will be included. 
- + Alternatively, the 'deno check' subcommand can be used.", ) } else { @@ -2745,7 +2745,8 @@ fn run_parse(flags: &mut Flags, matches: &mut ArgMatches) { fn task_parse(flags: &mut Flags, matches: &mut ArgMatches) { flags.config_flag = matches .remove_one::("config") - .map_or(ConfigFlag::Discover, ConfigFlag::Path); + .map(ConfigFlag::Path) + .unwrap_or(ConfigFlag::Discover); let mut task_flags = TaskFlags { cwd: matches.remove_one::("cwd"), diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index e7968a6655..288e453626 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -430,8 +430,8 @@ fn create_lsp_structs( ) { let registry_url = CliNpmRegistryApi::default_url(); let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly); - let npm_cache = Arc::new(NpmCache::from_deno_dir( - dir, + let npm_cache = Arc::new(NpmCache::new( + dir.npm_folder_path(), // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent // the cache from being filled with lots of packages while diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index eb674d3cb4..0d88109de3 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -129,7 +129,7 @@ impl Default for ReadonlyNpmCache { // This only gets used when creating the tsc runtime and for testing, and so // it shouldn't ever actually access the DenoDir, so it doesn't support a // custom root. 
- Self::from_deno_dir(&DenoDir::new(None).unwrap()) + Self::new(DenoDir::new(None).unwrap().npm_folder_path()) } } @@ -155,10 +155,6 @@ impl ReadonlyNpmCache { } } - pub fn from_deno_dir(dir: &DenoDir) -> Self { - Self::new(dir.npm_folder_path()) - } - pub fn root_dir_url(&self) -> &Url { &self.root_dir_url } @@ -306,14 +302,14 @@ pub struct NpmCache { } impl NpmCache { - pub fn from_deno_dir( - dir: &DenoDir, + pub fn new( + cache_dir_path: PathBuf, cache_setting: CacheSetting, http_client: HttpClient, progress_bar: ProgressBar, ) -> Self { Self { - readonly: ReadonlyNpmCache::from_deno_dir(dir), + readonly: ReadonlyNpmCache::new(cache_dir_path), cache_setting, http_client, progress_bar, diff --git a/cli/proc_state.rs b/cli/proc_state.rs index b6529d3a07..950e198242 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -227,8 +227,8 @@ impl ProcState { let lockfile = cli_options.maybe_lock_file(); let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); - let npm_cache = Arc::new(NpmCache::from_deno_dir( - &dir, + let npm_cache = Arc::new(NpmCache::new( + dir.npm_folder_path(), cli_options.cache_setting(), http_client.clone(), progress_bar.clone(), @@ -250,7 +250,7 @@ impl ProcState { let node_fs = Arc::new(deno_node::RealFs); let npm_fs_resolver = create_npm_fs_resolver( node_fs.clone(), - npm_cache, + npm_cache.clone(), &progress_bar, npm_registry_url, npm_resolution.clone(), @@ -302,12 +302,6 @@ impl ProcState { parsed_source_cache.clone(), emit_options, )); - let npm_cache = Arc::new(NpmCache::from_deno_dir( - &dir, - cli_options.cache_setting(), - http_client.clone(), - progress_bar.clone(), - )); let file_fetcher = Arc::new(file_fetcher); let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db(&dir)); diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 669ad1d813..d0126168d8 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -1,11 +1,20 @@ // Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. +use crate::args::get_root_cert_store; use crate::args::CaData; -use crate::args::Flags; +use crate::args::CacheSetting; +use crate::cache::DenoDir; use crate::colors; use crate::file_fetcher::get_source_from_data_url; +use crate::http_util::HttpClient; +use crate::npm::create_npm_fs_resolver; +use crate::npm::CliNpmRegistryApi; +use crate::npm::CliNpmResolver; +use crate::npm::NpmCache; +use crate::npm::NpmResolution; use crate::ops; -use crate::proc_state::ProcState; +use crate::util::progress_bar::ProgressBar; +use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; use crate::version; use crate::CliGraphResolver; @@ -16,11 +25,17 @@ use deno_core::futures::task::LocalFutureObj; use deno_core::futures::FutureExt; use deno_core::located_script_name; use deno_core::v8_set_flags; +use deno_core::CompiledWasmModuleStore; use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::ResolutionKind; +use deno_core::SharedArrayBufferStore; use deno_graph::source::Resolver; +use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; +use deno_runtime::deno_node; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; @@ -122,28 +137,6 @@ impl ModuleLoader for EmbeddedModuleLoader { } } -fn metadata_to_flags(metadata: &Metadata) -> Flags { - let permissions = metadata.permissions.clone(); - Flags { - argv: metadata.argv.clone(), - unstable: metadata.unstable, - seed: metadata.seed, - location: metadata.location.clone(), - allow_env: permissions.allow_env, - allow_hrtime: permissions.allow_hrtime, - allow_net: permissions.allow_net, - allow_ffi: permissions.allow_ffi, - allow_read: permissions.allow_read, - allow_run: permissions.allow_run, - allow_write: 
permissions.allow_write, - v8_flags: metadata.v8_flags.clone(), - log_level: metadata.log_level, - ca_stores: metadata.ca_stores.clone(), - ca_data: metadata.ca_data.clone().map(CaData::Bytes), - ..Default::default() - } -} - fn web_worker_callback() -> Arc { Arc::new(|worker| { let fut = async move { Ok(worker) }; @@ -151,25 +144,41 @@ fn web_worker_callback() -> Arc { }) } +struct SharedWorkerState { + npm_resolver: Arc, + root_cert_store: RootCertStore, + node_fs: Arc, + blob_store: BlobStore, + broadcast_channel: InMemoryBroadcastChannel, + shared_array_buffer_store: SharedArrayBufferStore, + compiled_wasm_module_store: CompiledWasmModuleStore, + // options + argv: Vec, + seed: Option, + unsafely_ignore_certificate_errors: Option>, + unstable: bool, +} + fn create_web_worker_callback( - ps: &ProcState, - module_loader: &Rc, + shared: &Arc, + module_loader: &EmbeddedModuleLoader, ) -> Arc { - let ps = ps.clone(); - let module_loader = module_loader.as_ref().clone(); + let shared = shared.clone(); + let module_loader = module_loader.clone(); Arc::new(move |args| { let module_loader = Rc::new(module_loader.clone()); - let create_web_worker_cb = create_web_worker_callback(&ps, &module_loader); + let create_web_worker_cb = + create_web_worker_callback(&shared, &module_loader); let web_worker_cb = web_worker_callback(); let options = WebWorkerOptions { bootstrap: BootstrapOptions { - args: ps.options.argv().clone(), + args: shared.argv.clone(), cpu_count: std::thread::available_parallelism() .map(|p| p.get()) .unwrap_or(1), - debug_flag: ps.options.log_level().map_or(false, |l| l == Level::Debug), + debug_flag: false, enable_testing_features: false, locale: deno_core::v8::icu::get_language_tag(), location: Some(args.main_module.clone()), @@ -177,20 +186,19 @@ fn create_web_worker_callback( is_tty: colors::is_tty(), runtime_version: version::deno().to_string(), ts_version: version::TYPESCRIPT.to_string(), - unstable: ps.options.unstable(), + unstable: shared.unstable, 
user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), + inspect: false, }, - extensions: ops::cli_exts(ps.npm_resolver.clone()), + extensions: ops::cli_exts(shared.npm_resolver.clone()), startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: ps - .options - .unsafely_ignore_certificate_errors() + unsafely_ignore_certificate_errors: shared + .unsafely_ignore_certificate_errors .clone(), - root_cert_store: Some(ps.root_cert_store.clone()), - seed: ps.options.seed(), + root_cert_store: Some(shared.root_cert_store.clone()), + seed: shared.seed, module_loader, - node_fs: Some(ps.node_fs.clone()), + node_fs: Some(shared.node_fs.clone()), npm_resolver: None, // not currently supported create_web_worker_cb, preload_module_cb: web_worker_cb.clone(), @@ -200,10 +208,12 @@ fn create_web_worker_callback( worker_type: args.worker_type, maybe_inspector_server: None, get_error_class_fn: Some(&get_error_class_name), - blob_store: ps.blob_store.clone(), - broadcast_channel: ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()), + blob_store: shared.blob_store.clone(), + broadcast_channel: shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some( + shared.compiled_wasm_module_store.clone(), + ), cache_storage_dir: None, stdio: Default::default(), }; @@ -222,13 +232,67 @@ pub async fn run( eszip: eszip::EszipV2, metadata: Metadata, ) -> Result<(), AnyError> { - let flags = metadata_to_flags(&metadata); let main_module = &metadata.entrypoint; - let ps = ProcState::from_flags(flags).await?; + let dir = DenoDir::new(None)?; + let root_cert_store = get_root_cert_store( + None, + metadata.ca_stores, + metadata.ca_data.map(CaData::Bytes), + )?; + let progress_bar = 
ProgressBar::new(ProgressBarStyle::TextOnly); + let http_client = HttpClient::new( + Some(root_cert_store.clone()), + metadata.unsafely_ignore_certificate_errors.clone(), + )?; + let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); + let npm_cache = Arc::new(NpmCache::new( + dir.npm_folder_path(), + CacheSetting::Use, + http_client.clone(), + progress_bar.clone(), + )); + let npm_api = Arc::new(CliNpmRegistryApi::new( + npm_registry_url.clone(), + npm_cache.clone(), + http_client.clone(), + progress_bar.clone(), + )); + let node_fs = Arc::new(deno_node::RealFs); + let npm_resolution = + Arc::new(NpmResolution::from_serialized(npm_api.clone(), None, None)); + let npm_fs_resolver = create_npm_fs_resolver( + node_fs.clone(), + npm_cache, + &progress_bar, + npm_registry_url, + npm_resolution.clone(), + None, + ); + let npm_resolver = Arc::new(CliNpmResolver::new( + npm_resolution.clone(), + npm_fs_resolver, + None, + )); + + let shared = Arc::new(SharedWorkerState { + npm_resolver, + root_cert_store, + node_fs, + blob_store: BlobStore::default(), + broadcast_channel: InMemoryBroadcastChannel::default(), + shared_array_buffer_store: SharedArrayBufferStore::default(), + compiled_wasm_module_store: CompiledWasmModuleStore::default(), + argv: metadata.argv, + seed: metadata.seed, + unsafely_ignore_certificate_errors: metadata + .unsafely_ignore_certificate_errors, + unstable: metadata.unstable, + }); + let permissions = PermissionsContainer::new(Permissions::from_options( &metadata.permissions, )?); - let module_loader = Rc::new(EmbeddedModuleLoader { + let module_loader = EmbeddedModuleLoader { eszip: Arc::new(eszip), maybe_import_map_resolver: metadata.maybe_import_map.map( |(base, source)| { @@ -238,21 +302,22 @@ pub async fn run( parse_from_json(&base, &source).unwrap().import_map, )), false, - ps.npm_api.clone(), - ps.npm_resolution.clone(), - ps.package_json_deps_installer.clone(), + npm_api.clone(), + npm_resolution.clone(), + Default::default(), )) 
}, ), - }); - let create_web_worker_cb = create_web_worker_callback(&ps, &module_loader); + }; + let create_web_worker_cb = + create_web_worker_callback(&shared, &module_loader); let web_worker_cb = web_worker_callback(); v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![])); let options = WorkerOptions { bootstrap: BootstrapOptions { - args: metadata.argv, + args: shared.argv.clone(), cpu_count: std::thread::available_parallelism() .map(|p| p.get()) .unwrap_or(1), @@ -269,13 +334,14 @@ pub async fn run( ts_version: version::TYPESCRIPT.to_string(), unstable: metadata.unstable, user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), + inspect: false, }, - extensions: ops::cli_exts(ps.npm_resolver.clone()), + extensions: ops::cli_exts(shared.npm_resolver.clone()), startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: metadata - .unsafely_ignore_certificate_errors, - root_cert_store: Some(ps.root_cert_store.clone()), + unsafely_ignore_certificate_errors: shared + .unsafely_ignore_certificate_errors + .clone(), + root_cert_store: Some(shared.root_cert_store.clone()), seed: metadata.seed, source_map_getter: None, format_js_error_fn: Some(Arc::new(format_js_error)), @@ -285,16 +351,16 @@ pub async fn run( maybe_inspector_server: None, should_break_on_first_statement: false, should_wait_for_inspector_session: false, - module_loader, - node_fs: Some(ps.node_fs.clone()), + module_loader: Rc::new(module_loader), + node_fs: Some(shared.node_fs.clone()), npm_resolver: None, // not currently supported get_error_class_fn: Some(&get_error_class_name), cache_storage_dir: None, origin_storage_dir: None, - blob_store: ps.blob_store.clone(), - broadcast_channel: ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()), + blob_store: shared.blob_store.clone(), + broadcast_channel: 
shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some(shared.compiled_wasm_module_store.clone()), stdio: Default::default(), }; let mut worker = MainWorker::bootstrap_from_options( diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 0b6ef8bb1d..9930bcc771 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -931,7 +931,10 @@ mod mitata { sysctl.arg("-n"); sysctl.arg("machdep.cpu.brand_string"); return std::str::from_utf8( - &sysctl.output().map_or(Vec::from("unknown"), |x| x.stdout), + &sysctl + .output() + .map(|x| x.stdout) + .unwrap_or(Vec::from("unknown")), ) .unwrap() .trim() From 2df6db36c85c27d424d54e9c168ef4ea09c5c08c Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Wed, 26 Apr 2023 13:14:01 -0400 Subject: [PATCH 049/320] feat(ext/kv): add more atomic operation helpers (#18854) Co-authored-by: losfair Co-authored-by: Luca Casonato --- cli/tests/unit/kv_test.ts | 30 ++++++++++++++++++++++++++++++ cli/tsc/dts/lib.deno.unstable.d.ts | 19 +++++++++++++++++-- ext/kv/01_db.ts | 23 +++++++++++++++-------- 3 files changed, 62 insertions(+), 10 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 60cf11b8ef..0af8f338d2 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -219,6 +219,36 @@ dbTest("compare and mutate not exists", async (db) => { assertEquals(res, null); }); +dbTest("atomic mutation helper (sum)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().sum(["t"], 1n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(43n)); +}); + +dbTest("atomic mutation helper (min)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().min(["t"], 1n).commit(); + assertEquals((await db.get(["t"])).value, new 
Deno.KvU64(1n)); + + await db.atomic().min(["t"], 2n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(1n)); +}); + +dbTest("atomic mutation helper (max)", async (db) => { + await db.set(["t"], new Deno.KvU64(42n)); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().max(["t"], 41n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(42n)); + + await db.atomic().max(["t"], 43n).commit(); + assertEquals((await db.get(["t"])).value, new Deno.KvU64(43n)); +}); + dbTest("compare multiple and mutate", async (db) => { await db.set(["t1"], "1"); await db.set(["t2"], "2"); diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index cf6cedf414..dc3bfcfc01 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1646,7 +1646,8 @@ declare namespace Deno { * - `sum` - Adds the given value to the existing value of the key. Both the * value specified in the mutation, and any existing value must be of type * `Deno.KvU64`. If the key does not exist, the value is set to the given - * value (summed with 0). + * value (summed with 0). If the result of the sum overflows an unsigned + * 64-bit integer, the result is wrapped around. * - `max` - Sets the value of the key to the maximum of the existing value * and the given value. Both the value specified in the mutation, and any * existing value must be of type `Deno.KvU64`. If the key does not exist, @@ -1845,9 +1846,23 @@ declare namespace Deno { */ mutate(...mutations: KvMutation[]): this; /** - * Shortcut for creating a sum mutation. + * Shortcut for creating a `sum` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. */ sum(key: KvKey, n: bigint): this; + /** + * Shortcut for creating a `min` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. 
+ */ + min(key: KvKey, n: bigint): this; + /** + * Shortcut for creating a `max` mutation. This method wraps `n` in a + * {@linkcode Deno.KvU64}, so the value of `n` must be in the range + * `[0, 2^64-1]`. + */ + max(key: KvKey, n: bigint): this; /** * Add to the operation a mutation that sets the value of the specified key * to the specified value if all checks pass during the commit. diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 1a7b27dac7..da29a09521 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -211,14 +211,6 @@ class AtomicOperation { return this; } - sum(key: Deno.KvKey, n: bigint): this { - return this.mutate({ - type: "sum", - key, - value: new KvU64(n), - }); - } - mutate(...mutations: Deno.KvMutation[]): this { for (const mutation of mutations) { const key = mutation.key; @@ -249,6 +241,21 @@ class AtomicOperation { return this; } + sum(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "sum", serializeValue(new KvU64(n))]); + return this; + } + + min(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "min", serializeValue(new KvU64(n))]); + return this; + } + + max(key: Deno.KvKey, n: bigint): this { + this.#mutations.push([key, "max", serializeValue(new KvU64(n))]); + return this; + } + set(key: Deno.KvKey, value: unknown): this { this.#mutations.push([key, "set", serializeValue(value)]); return this; From 14aaa73c0200d7fac4aa224d623e28b5955daab9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 19:57:38 +0200 Subject: [PATCH 050/320] refactor: don't expose Deno[Deno.internal].core namespace (#18816) --- cli/tests/integration/bench_tests.rs | 2 +- cli/tests/integration/inspector_tests.rs | 1 + cli/tests/integration/js_unit_tests.rs | 1 + cli/tests/integration/npm_tests.rs | 2 +- cli/tests/integration/run_tests.rs | 95 ++++++++++++------------ cli/tests/integration/test_tests.rs | 4 +- cli/tools/test.rs | 2 +- core/01_core.js | 8 +- core/runtime.rs | 20 +++++ 
runtime/js/99_main.js | 19 +++-- test_napi/cleanup_hook_test.js | 1 + test_napi/tests/napi_tests.rs | 1 + 12 files changed, 95 insertions(+), 61 deletions(-) diff --git a/cli/tests/integration/bench_tests.rs b/cli/tests/integration/bench_tests.rs index 16ac5852ec..2a12be9636 100644 --- a/cli/tests/integration/bench_tests.rs +++ b/cli/tests/integration/bench_tests.rs @@ -198,7 +198,7 @@ fn recursive_permissions_pledge() { let context = TestContext::default(); let output = context .new_command() - .args("bench bench/recursive_permissions_pledge.js") + .args("bench --enable-testing-features-do-not-use bench/recursive_permissions_pledge.js") .run(); output.assert_exit_code(1); assert_contains!( diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index cf66c4adc1..29d13cd462 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -307,6 +307,7 @@ async fn inspector_break_on_first_line() { let child = util::deno_cmd() .arg("run") .arg(inspect_flag_with_unique_port("--inspect-brk")) + .arg("--enable-testing-features-do-not-use") .arg(script) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) diff --git a/cli/tests/integration/js_unit_tests.rs b/cli/tests/integration/js_unit_tests.rs index 793f66b1e1..0e94390e86 100644 --- a/cli/tests/integration/js_unit_tests.rs +++ b/cli/tests/integration/js_unit_tests.rs @@ -28,6 +28,7 @@ fn js_unit_tests() { .arg("--unstable") .arg("--location=http://js-unit-tests/foo/bar") .arg("--no-prompt") + .arg("--enable-testing-features-do-not-use") .arg("-A") .arg(util::tests_path().join("unit")) .spawn() diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs index fad79e371f..29f9054ba7 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -118,7 +118,7 @@ itest!(dual_cjs_esm { }); itest!(child_process_fork_test { - args: "run -A --quiet 
npm/child_process_fork_test/main.ts", + args: "run -A --quiet --enable-testing-features-do-not-use npm/child_process_fork_test/main.ts", output: "npm/child_process_fork_test/main.out", envs: env_vars_for_npm_tests(), http_server: true, diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index aba6283d12..cc37cf523a 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -213,7 +213,7 @@ itest!(_038_checkjs { }); itest!(_042_dyn_import_evalcontext { - args: "run --quiet --allow-read --reload run/042_dyn_import_evalcontext.ts", + args: "run --quiet --allow-read --reload --enable-testing-features-do-not-use run/042_dyn_import_evalcontext.ts", output: "run/042_dyn_import_evalcontext.ts.out", }); @@ -1161,25 +1161,25 @@ itest!(exit_error42 { }); itest!(set_exit_code_0 { - args: "run --no-check --unstable run/set_exit_code_0.ts", + args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_0.ts", output_str: Some(""), exit_code: 0, }); itest!(set_exit_code_1 { - args: "run --no-check --unstable run/set_exit_code_1.ts", + args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_1.ts", output_str: Some(""), exit_code: 42, }); itest!(set_exit_code_2 { - args: "run --no-check --unstable run/set_exit_code_2.ts", + args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_2.ts", output_str: Some(""), exit_code: 42, }); itest!(op_exit_op_set_exit_code_in_worker { - args: "run --no-check --unstable --allow-read run/op_exit_op_set_exit_code_in_worker.ts", + args: "run --no-check --unstable --allow-read --enable-testing-features-do-not-use run/op_exit_op_set_exit_code_in_worker.ts", exit_code: 21, output_str: Some(""), }); @@ -1197,7 +1197,7 @@ itest!(heapstats { itest!(finalization_registry { args: - "run --quiet --unstable --v8-flags=--expose-gc run/finalization_registry.js", + "run --quiet --unstable 
--enable-testing-features-do-not-use --v8-flags=--expose-gc run/finalization_registry.js", output: "run/finalization_registry.js.out", }); @@ -2755,7 +2755,7 @@ itest!(long_data_url_formatting { }); itest!(eval_context_throw_dom_exception { - args: "run run/eval_context_throw_dom_exception.js", + args: "run --enable-testing-features-do-not-use run/eval_context_throw_dom_exception.js", output: "run/eval_context_throw_dom_exception.js.out", }); @@ -3117,115 +3117,115 @@ itest!(fetch_async_error_stack { }); itest!(unstable_ffi_1 { - args: "run run/ffi/unstable_ffi_1.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_1.js", output: "run/ffi/unstable_ffi_1.js.out", exit_code: 70, }); itest!(unstable_ffi_2 { - args: "run run/ffi/unstable_ffi_2.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_2.js", output: "run/ffi/unstable_ffi_2.js.out", exit_code: 70, }); itest!(unstable_ffi_3 { - args: "run run/ffi/unstable_ffi_3.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_3.js", output: "run/ffi/unstable_ffi_3.js.out", exit_code: 70, }); itest!(unstable_ffi_4 { - args: "run run/ffi/unstable_ffi_4.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_4.js", output: "run/ffi/unstable_ffi_4.js.out", exit_code: 70, }); itest!(unstable_ffi_5 { - args: "run run/ffi/unstable_ffi_5.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_5.js", output: "run/ffi/unstable_ffi_5.js.out", exit_code: 70, }); itest!(unstable_ffi_6 { - args: "run run/ffi/unstable_ffi_6.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_6.js", output: "run/ffi/unstable_ffi_6.js.out", exit_code: 70, }); itest!(unstable_ffi_7 { - args: "run run/ffi/unstable_ffi_7.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_7.js", output: "run/ffi/unstable_ffi_7.js.out", exit_code: 70, }); itest!(unstable_ffi_8 { - args: "run run/ffi/unstable_ffi_8.js", + 
args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_8.js", output: "run/ffi/unstable_ffi_8.js.out", exit_code: 70, }); itest!(unstable_ffi_9 { - args: "run run/ffi/unstable_ffi_9.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_9.js", output: "run/ffi/unstable_ffi_9.js.out", exit_code: 70, }); itest!(unstable_ffi_10 { - args: "run run/ffi/unstable_ffi_10.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_10.js", output: "run/ffi/unstable_ffi_10.js.out", exit_code: 70, }); itest!(unstable_ffi_11 { - args: "run run/ffi/unstable_ffi_11.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_11.js", output: "run/ffi/unstable_ffi_11.js.out", exit_code: 70, }); itest!(unstable_ffi_12 { - args: "run run/ffi/unstable_ffi_12.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_12.js", output: "run/ffi/unstable_ffi_12.js.out", exit_code: 70, }); itest!(unstable_ffi_13 { - args: "run run/ffi/unstable_ffi_13.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_13.js", output: "run/ffi/unstable_ffi_13.js.out", exit_code: 70, }); itest!(unstable_ffi_14 { - args: "run run/ffi/unstable_ffi_14.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_14.js", output: "run/ffi/unstable_ffi_14.js.out", exit_code: 70, }); itest!(unstable_ffi_15 { - args: "run run/ffi/unstable_ffi_15.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_15.js", output: "run/ffi/unstable_ffi_15.js.out", exit_code: 70, }); itest!(unstable_ffi_16 { - args: "run run/ffi/unstable_ffi_16.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_16.js", output: "run/ffi/unstable_ffi_16.js.out", exit_code: 70, }); itest!(unstable_ffi_17 { - args: "run run/ffi/unstable_ffi_17.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_17.js", output: "run/ffi/unstable_ffi_17.js.out", exit_code: 70, }); 
itest!(unstable_ffi_18 { - args: "run run/ffi/unstable_ffi_18.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_18.js", output: "run/ffi/unstable_ffi_18.js.out", exit_code: 70, }); itest!(unstable_ffi_19 { - args: "run run/ffi/unstable_ffi_19.js", + args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_19.js", output: "run/ffi/unstable_ffi_19.js.out", exit_code: 70, }); @@ -3248,7 +3248,7 @@ itest!(event_listener_error_handled { // https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 itest!(event_listener_error_immediate_exit { - args: "run --quiet run/event_listener_error_immediate_exit.ts", + args: "run --quiet --enable-testing-features-do-not-use run/event_listener_error_immediate_exit.ts", output: "run/event_listener_error_immediate_exit.ts.out", exit_code: 1, }); @@ -3256,7 +3256,7 @@ itest!(event_listener_error_immediate_exit { // https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 itest!(event_listener_error_immediate_exit_worker { args: - "run --quiet --unstable -A run/event_listener_error_immediate_exit_worker.ts", + "run --quiet --unstable -A --enable-testing-features-do-not-use run/event_listener_error_immediate_exit_worker.ts", output: "run/event_listener_error_immediate_exit_worker.ts.out", exit_code: 1, }); @@ -4363,22 +4363,24 @@ fn permission_prompt_strips_ansi_codes_and_control_chars() { ) }); - util::with_pty(&["repl"], |mut console| { - console.write_line_raw(r#"const boldANSI = "\u001b[1m";"#); - console.expect("undefined"); - console.write_line_raw(r#"const unboldANSI = "\u001b[22m";"#); - console.expect("undefined"); - console.write_line_raw(r#"const prompt = `┌ ⚠️ ${boldANSI}Deno requests run access to "echo"${unboldANSI}\n ├ Requested by \`Deno.Command().output()`"#); - console.expect("undefined"); - console.write_line_raw(r#"const moveANSIUp = "\u001b[1A";"#); - console.expect("undefined"); - console.write_line_raw(r#"const clearANSI = "\u001b[2K";"#); - 
console.expect("undefined"); - console.write_line_raw(r#"const moveANSIStart = "\u001b[1000D";"#); - console.expect("undefined"); + util::with_pty( + &["repl", "--enable-testing-features-do-not-use"], + |mut console| { + console.write_line_raw(r#"const boldANSI = "\u001b[1m";"#); + console.expect("undefined"); + console.write_line_raw(r#"const unboldANSI = "\u001b[22m";"#); + console.expect("undefined"); + console.write_line_raw(r#"const prompt = `┌ ⚠️ ${boldANSI}Deno requests run access to "echo"${unboldANSI}\n ├ Requested by \`Deno.Command().output()`"#); + console.expect("undefined"); + console.write_line_raw(r#"const moveANSIUp = "\u001b[1A";"#); + console.expect("undefined"); + console.write_line_raw(r#"const clearANSI = "\u001b[2K";"#); + console.expect("undefined"); + console.write_line_raw(r#"const moveANSIStart = "\u001b[1000D";"#); + console.expect("undefined"); - console.write_line_raw( - r#"Deno[Deno.internal].core.ops.op_spawn_child({ + console.write_line_raw( + r#"Deno[Deno.internal].core.ops.op_spawn_child({ cmd: "cat", args: ["file.txt"], clearEnv: false, @@ -4392,10 +4394,11 @@ fn permission_prompt_strips_ansi_codes_and_control_chars() { signal: undefined, windowsRawArguments: false, }, moveANSIUp + clearANSI + moveANSIStart + prompt)"#, - ); + ); - console.expect(r#"┌ ⚠️ Deno requests run access to "cat""#); - }); + console.expect(r#"┌ ⚠️ Deno requests run access to "cat""#); + }, + ); } itest!(node_builtin_modules_ts { diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index 8e24045301..cf16652897 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -216,7 +216,7 @@ itest!(ops_sanitizer_timeout_failure { }); itest!(ops_sanitizer_multiple_timeout_tests { - args: "test --trace-ops test/ops_sanitizer_multiple_timeout_tests.ts", + args: "test --trace-ops --enable-testing-features-do-not-use test/ops_sanitizer_multiple_timeout_tests.ts", exit_code: 1, output: 
"test/ops_sanitizer_multiple_timeout_tests.out", }); @@ -390,7 +390,7 @@ fn recursive_permissions_pledge() { let context = TestContext::default(); let output = context .new_command() - .args("test test/recursive_permissions_pledge.js") + .args("test --enable-testing-features-do-not-use test/recursive_permissions_pledge.js") .run(); output.assert_exit_code(1); assert_contains!( diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 8533073744..17d1cebf32 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -954,7 +954,7 @@ pub async fn test_specifier( if ps.options.trace_ops() { worker.js_runtime.execute_script_static( located_script_name!(), - "Deno[Deno.internal].core.enableOpCallTracing();", + "Deno[Deno.internal].enableOpCallTracing();", )?; } worker.dispatch_load_event(located_script_name!())?; diff --git a/core/01_core.js b/core/01_core.js index a8bdeb2a86..7663db5d9f 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -453,7 +453,6 @@ BadResourcePrototype, Interrupted, InterruptedPrototype, - enableOpCallTracing, isOpCallTracingEnabled, opCallTraces, refOp, @@ -507,8 +506,11 @@ }); ObjectAssign(globalThis.__bootstrap, { core }); - const internals = {}; - ObjectAssign(globalThis.__bootstrap, { internals }); + ObjectAssign(globalThis.__bootstrap, { + internals: { + enableOpCallTracing, + }, + }); ObjectAssign(globalThis.Deno, { core }); // Direct bindings on `globalThis` diff --git a/core/runtime.rs b/core/runtime.rs index bae6a40db7..6820df6bca 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -479,6 +479,26 @@ impl JsRuntime { } } } + // Cache bust plain JS (non-ES modules as well) + #[cfg(feature = "include_js_files_for_snapshotting")] + if snapshot_options != snapshot_util::SnapshotOptions::None { + let js_sources = options + .extensions + .iter() + .flat_map(|ext| match ext.get_js_sources() { + Some(s) => s.to_owned(), + None => vec![], + }) + .collect::>(); + for source in js_sources { + use crate::ExtensionFileSourceCode; + if let 
ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = + &source.code + { + println!("cargo:rerun-if-changed={}", path.display()) + } + } + } Rc::new(crate::modules::ExtModuleLoader::new( options.module_loader, diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index fa16cc1f40..914940f5cf 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -391,12 +391,6 @@ function promiseRejectMacrotaskCallback() { let hasBootstrapped = false; // Set up global properties shared by main and worker runtime. ObjectDefineProperties(globalThis, windowOrWorkerGlobalScope); -// FIXME(bartlomieju): temporarily add whole `Deno.core` to -// `Deno[Deno.internal]` namespace. It should be removed and only necessary -// methods should be left there. -ObjectAssign(internals, { - core, -}); const internalSymbol = Symbol("Deno.internal"); const finalDenoNs = { internal: internalSymbol, @@ -428,7 +422,7 @@ function bootstrapMainRuntime(runtimeOptions) { 13: v8Version, 14: userAgent, 15: inspectFlag, - // 16: enableTestingFeaturesFlag + 16: enableTestingFeaturesFlag, } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -503,6 +497,12 @@ function bootstrapMainRuntime(runtimeOptions) { ObjectAssign(finalDenoNs, denoNsUnstable); } + // Add `Deno[Deno.internal].core` namespace if + // `--enable-testing-features-do-not-use` flag is set. + if (enableTestingFeaturesFlag) { + ObjectAssign(internals, { core }); + } + // Setup `Deno` global - we're actually overriding already existing global // `Deno` with `Deno` namespace from "./deno.ts". ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); @@ -612,6 +612,11 @@ function bootstrapWorkerRuntime( noColor: util.readOnly(noColor), args: util.readOnly(ObjectFreeze(args)), }); + // Add `Deno[Deno.internal].core` namespace if + // `--enable-testing-features-do-not-use` flag is set. 
+ if (enableTestingFeaturesFlag) { + ObjectAssign(internals, { core }); + } // Setup `Deno` global - we're actually overriding already // existing global `Deno` with `Deno` namespace from "./deno.ts". ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); diff --git a/test_napi/cleanup_hook_test.js b/test_napi/cleanup_hook_test.js index 30ceae470c..15741b60a1 100644 --- a/test_napi/cleanup_hook_test.js +++ b/test_napi/cleanup_hook_test.js @@ -12,6 +12,7 @@ if (import.meta.main) { const { stdout, stderr, code } = await new Deno.Command(Deno.execPath(), { args: [ "run", + "--enable-testing-features-do-not-use", "--allow-read", "--allow-run", "--allow-ffi", diff --git a/test_napi/tests/napi_tests.rs b/test_napi/tests/napi_tests.rs index 747f6aa276..722dc94177 100644 --- a/test_napi/tests/napi_tests.rs +++ b/test_napi/tests/napi_tests.rs @@ -31,6 +31,7 @@ fn napi_tests() { .arg("--allow-env") .arg("--allow-ffi") .arg("--allow-run") + .arg("--enable-testing-features-do-not-use") .spawn() .unwrap() .wait_with_output() From 77e25a656eca0cb1639ae39c515ac6c5f86d2ac9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 20:02:27 +0200 Subject: [PATCH 051/320] refactor(core): simplify op types and codegeneration (#18843) About 2% improvement on WS/HTTP benchmarks, possibly unlocking more optimizations in the future. 
--------- Co-authored-by: Matt Mastracci --- core/lib.rs | 7 +- core/ops.rs | 119 ++++++++---------- core/ops_metrics.rs | 2 +- core/runtime.rs | 156 ++++++++++++++++++------ ops/fast_call.rs | 35 +----- ops/lib.rs | 88 +++++++------ ops/optimizer_tests/async_nop.out | 30 +---- ops/optimizer_tests/async_result.out | 35 ++---- ops/optimizer_tests/issue16934.out | 29 ++--- ops/optimizer_tests/issue16934_fast.out | 29 ++--- 10 files changed, 262 insertions(+), 268 deletions(-) diff --git a/core/lib.rs b/core/lib.rs index 70dadfc6a8..cb16c26548 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -89,11 +89,8 @@ pub use crate::modules::ModuleType; pub use crate::modules::NoopModuleLoader; pub use crate::modules::ResolutionKind; pub use crate::normalize_path::normalize_path; -pub use crate::ops::Op; -pub use crate::ops::OpAsyncFuture; pub use crate::ops::OpCall; pub use crate::ops::OpError; -pub use crate::ops::OpFn; pub use crate::ops::OpId; pub use crate::ops::OpResult; pub use crate::ops::OpState; @@ -135,6 +132,10 @@ pub mod _ops { pub use super::ops::to_op_result; pub use super::ops::OpCtx; pub use super::ops::OpResult; + pub use super::runtime::map_async_op1; + pub use super::runtime::map_async_op2; + pub use super::runtime::map_async_op3; + pub use super::runtime::map_async_op4; pub use super::runtime::queue_async_op; pub use super::runtime::queue_fast_async_op; pub use super::runtime::V8_WRAPPER_OBJECT_INDEX; diff --git a/core/ops.rs b/core/ops.rs index cceeb56547..b7dcc26638 100644 --- a/core/ops.rs +++ b/core/ops.rs @@ -8,12 +8,10 @@ use crate::runtime::JsRuntimeState; use crate::OpDecl; use crate::OpsTracker; use anyhow::Error; -use futures::future::maybe_done; -use futures::future::FusedFuture; use futures::future::MaybeDone; -use futures::ready; -use futures::task::noop_waker; use futures::Future; +use futures::FutureExt; +use pin_project::pin_project; use serde::Serialize; use std::cell::RefCell; use std::ops::Deref; @@ -22,91 +20,78 @@ use std::pin::Pin; use 
std::ptr::NonNull; use std::rc::Rc; use std::rc::Weak; -use std::task::Context; -use std::task::Poll; use v8::fast_api::CFunctionInfo; use v8::fast_api::CTypeInfo; -/// Wrapper around a Future, which causes that Future to be polled immediately. -/// -/// Background: ops are stored in a `FuturesUnordered` structure which polls -/// them, but without the `OpCall` wrapper this doesn't happen until the next -/// turn of the event loop, which is too late for certain ops. -pub struct OpCall(MaybeDone>>>); +pub type RealmIdx = u16; +pub type PromiseId = i32; +pub type OpId = u16; -pub enum EagerPollResult { - Ready(T), - Pending(OpCall), +#[pin_project] +pub struct OpCall { + realm_idx: RealmIdx, + promise_id: PromiseId, + op_id: OpId, + /// Future is not necessarily Unpin, so we need to pin_project. + #[pin] + fut: MaybeDone>>>, } -impl OpCall { - /// Wraps a future, and polls the inner future immediately. - /// This should be the default choice for ops. - pub fn eager(fut: impl Future + 'static) -> EagerPollResult { - let boxed = Box::pin(fut) as Pin>>; - let mut inner = maybe_done(boxed); - let waker = noop_waker(); - let mut cx = Context::from_waker(&waker); - let mut pinned = Pin::new(&mut inner); - let poll = pinned.as_mut().poll(&mut cx); - match poll { - Poll::Ready(_) => EagerPollResult::Ready(pinned.take_output().unwrap()), - _ => EagerPollResult::Pending(Self(inner)), - } - } - +impl OpCall { /// Wraps a future; the inner future is polled the usual way (lazily). - pub fn lazy(fut: impl Future + 'static) -> Self { - let boxed = Box::pin(fut) as Pin>>; - let inner = maybe_done(boxed); - Self(inner) + pub fn pending( + op_ctx: &OpCtx, + promise_id: PromiseId, + fut: Pin + 'static>>, + ) -> Self { + Self { + realm_idx: op_ctx.realm_idx, + op_id: op_ctx.id, + promise_id, + fut: MaybeDone::Future(fut), + } } /// Create a future by specifying its output. This is basically the same as /// `async { value }` or `futures::future::ready(value)`. 
- pub fn ready(value: T) -> Self { - Self(MaybeDone::Done(value)) + pub fn ready(op_ctx: &OpCtx, promise_id: PromiseId, value: OpResult) -> Self { + Self { + realm_idx: op_ctx.realm_idx, + op_id: op_ctx.id, + promise_id, + fut: MaybeDone::Done(value), + } } } -impl Future for OpCall { - type Output = T; +impl Future for OpCall { + type Output = (RealmIdx, PromiseId, OpId, OpResult); fn poll( self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll { - // TODO(piscisaureus): safety comment - #[allow(clippy::undocumented_unsafe_blocks)] - let inner = unsafe { &mut self.get_unchecked_mut().0 }; - let mut pinned = Pin::new(inner); - ready!(pinned.as_mut().poll(cx)); - Poll::Ready(pinned.as_mut().take_output().unwrap()) + let realm_idx = self.realm_idx; + let promise_id = self.promise_id; + let op_id = self.op_id; + let fut = &mut *self.project().fut; + match fut { + MaybeDone::Done(_) => { + // Let's avoid using take_output as it keeps our Pin::box + let res = std::mem::replace(fut, MaybeDone::Gone); + let MaybeDone::Done(res) = res + else { + unreachable!() + }; + std::task::Poll::Ready(res) + } + MaybeDone::Future(f) => f.poll_unpin(cx), + MaybeDone::Gone => std::task::Poll::Pending, + } + .map(move |res| (realm_idx, promise_id, op_id, res)) } } -impl FusedFuture for OpCall -where - F: Future, -{ - fn is_terminated(&self) -> bool { - self.0.is_terminated() - } -} - -pub type RealmIdx = usize; -pub type PromiseId = i32; -pub type OpAsyncFuture = OpCall<(PromiseId, OpId, OpResult)>; -pub type OpFn = - fn(&mut v8::HandleScope, v8::FunctionCallbackArguments, v8::ReturnValue); -pub type OpId = usize; - -pub enum Op { - Sync(OpResult), - Async(OpAsyncFuture), - NotFound, -} - pub enum OpResult { Ok(serde_v8::SerializablePkg), Err(OpError), diff --git a/core/ops_metrics.rs b/core/ops_metrics.rs index c0b8abb519..b25368bd01 100644 --- a/core/ops_metrics.rs +++ b/core/ops_metrics.rs @@ -63,7 +63,7 @@ impl OpsTracker { #[inline] fn 
metrics_mut(&self, id: OpId) -> RefMut { - RefMut::map(self.ops.borrow_mut(), |ops| &mut ops[id]) + RefMut::map(self.ops.borrow_mut(), |ops| &mut ops[id as usize]) } #[inline] diff --git a/core/runtime.rs b/core/runtime.rs index 6820df6bca..3723a917ac 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -35,8 +35,10 @@ use futures::channel::oneshot; use futures::future::poll_fn; use futures::future::Future; use futures::future::FutureExt; +use futures::future::MaybeDone; use futures::stream::FuturesUnordered; use futures::stream::StreamExt; +use futures::task::noop_waker; use futures::task::AtomicWaker; use smallvec::SmallVec; use std::any::Any; @@ -45,6 +47,7 @@ use std::collections::HashMap; use std::collections::VecDeque; use std::ffi::c_void; use std::option::Option; +use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; use std::sync::Mutex; @@ -53,8 +56,6 @@ use std::task::Context; use std::task::Poll; use v8::OwnedIsolate; -type PendingOpFuture = OpCall<(RealmIdx, PromiseId, OpId, OpResult)>; - pub enum Snapshot { Static(&'static [u8]), JustCreated(v8::StartupData), @@ -165,7 +166,7 @@ pub struct JsRuntimeState { dyn_module_evaluate_idle_counter: u32, pub(crate) source_map_getter: Option>>, pub(crate) source_map_cache: Rc>, - pub(crate) pending_ops: FuturesUnordered, + pub(crate) pending_ops: FuturesUnordered, pub(crate) have_unpolled_ops: bool, pub(crate) op_state: Rc>, pub(crate) shared_array_buffer_store: Option, @@ -360,7 +361,7 @@ impl JsRuntime { .into_iter() .enumerate() .map(|(id, decl)| { - OpCtx::new(id, 0, Rc::new(decl), op_state.clone(), weak.clone()) + OpCtx::new(id as u16, 0, Rc::new(decl), op_state.clone(), weak.clone()) }) .collect::>() .into_boxed_slice(); @@ -610,7 +611,7 @@ impl JsRuntime { /// constructed. 
pub fn create_realm(&mut self) -> Result { let realm = { - let realm_idx = self.state.borrow().known_realms.len(); + let realm_idx = self.state.borrow().known_realms.len() as u16; let op_ctxs: Box<[OpCtx]> = self .global_realm() @@ -2231,7 +2232,7 @@ impl JsRuntime { { let (realm_idx, promise_id, op_id, resp) = item; state.op_state.borrow().tracker.track_async_completed(op_id); - responses_per_realm[realm_idx].push((promise_id, resp)); + responses_per_realm[realm_idx as usize].push((promise_id, resp)); } } @@ -2335,7 +2336,7 @@ impl JsRuntime { { let (realm_idx, promise_id, op_id, mut resp) = item; debug_assert_eq!( - state.known_realms[realm_idx], + state.known_realms[realm_idx as usize], state.global_realm.as_ref().unwrap().context() ); realm_state.unrefed_ops.remove(&promise_id); @@ -2382,27 +2383,106 @@ impl JsRuntime { } #[inline] -pub fn queue_fast_async_op( +pub fn queue_fast_async_op( ctx: &OpCtx, - op: impl Future + 'static, + promise_id: PromiseId, + op: impl Future> + 'static, ) { let runtime_state = match ctx.runtime_state.upgrade() { Some(rc_state) => rc_state, // atleast 1 Rc is held by the JsRuntime. 
None => unreachable!(), }; - + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; + let fut = op + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(); let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(OpCall::lazy(op)); + state + .pending_ops + .push(OpCall::pending(ctx, promise_id, fut)); state.have_unpolled_ops = true; } #[inline] +pub fn map_async_op1( + ctx: &OpCtx, + op: impl Future> + 'static, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; + + let fut = op + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(); + MaybeDone::Future(fut) +} + +#[inline] +pub fn map_async_op2( + ctx: &OpCtx, + op: impl Future + 'static, +) -> MaybeDone>>> { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + + let fut = op.map(|result| OpResult::Ok(result.into())).boxed_local(); + MaybeDone::Future(fut) +} + +#[inline] +pub fn map_async_op3( + ctx: &OpCtx, + op: Result> + 'static, Error>, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; + + match op { + Err(err) => MaybeDone::Done(OpResult::Err(OpError::new(get_class, err))), + Ok(fut) => MaybeDone::Future( + fut + .map(|result| crate::_ops::to_op_result(get_class, result)) + .boxed_local(), + ), + } +} + +#[inline] +pub fn map_async_op4( + ctx: &OpCtx, + op: Result + 'static, Error>, +) -> MaybeDone>>> { + let get_class = { + let state = RefCell::borrow(&ctx.state); + state.tracker.track_async(ctx.id); + state.get_error_class_fn + }; + + match op { + Err(err) => MaybeDone::Done(OpResult::Err(OpError::new(get_class, err))), + Ok(fut) => MaybeDone::Future( + fut.map(|result| OpResult::Ok(result.into())).boxed_local(), + ), + } +} + pub fn 
queue_async_op<'s>( ctx: &OpCtx, scope: &'s mut v8::HandleScope, deferred: bool, - op: impl Future + 'static, + promise_id: PromiseId, + mut op: MaybeDone>>>, ) -> Option> { let runtime_state = match ctx.runtime_state.upgrade() { Some(rc_state) => rc_state, @@ -2415,32 +2495,40 @@ pub fn queue_async_op<'s>( // deno_core doesn't currently support such exposure, even though embedders // can cause them, so we panic in debug mode (since the check is expensive). debug_assert_eq!( - runtime_state.borrow().known_realms[ctx.realm_idx].to_local(scope), + runtime_state.borrow().known_realms[ctx.realm_idx as usize].to_local(scope), Some(scope.get_current_context()) ); - match OpCall::eager(op) { - // If the result is ready we'll just return it straight to the caller, so - // we don't have to invoke a JS callback to respond. // This works under the - // assumption that `()` return value is serialized as `null`. - EagerPollResult::Ready((_, _, op_id, mut resp)) if !deferred => { - let resp = resp.to_v8(scope).unwrap(); - ctx.state.borrow_mut().tracker.track_async_completed(op_id); - return Some(resp); - } - EagerPollResult::Ready(op) => { - let ready = OpCall::ready(op); - let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(ready); - state.have_unpolled_ops = true; - } - EagerPollResult::Pending(op) => { - let mut state = runtime_state.borrow_mut(); - state.pending_ops.push(op); - state.have_unpolled_ops = true; - } - } + // All ops are polled immediately + let waker = noop_waker(); + let mut cx = Context::from_waker(&waker); + // Note that MaybeDone returns () from the future + let op_call = match op.poll_unpin(&mut cx) { + Poll::Pending => { + let MaybeDone::Future(fut) = op else { + unreachable!() + }; + OpCall::pending(ctx, promise_id, fut) + } + Poll::Ready(_) => { + let mut op_result = Pin::new(&mut op).take_output().unwrap(); + // If the op is ready and is not marked as deferred we can immediately return + // the result. 
+ if !deferred { + ctx.state.borrow_mut().tracker.track_async_completed(ctx.id); + return Some(op_result.to_v8(scope).unwrap()); + } + + OpCall::ready(ctx, promise_id, op_result) + } + }; + + // Otherwise we will push it to the `pending_ops` and let it be polled again + // or resolved on the next tick of the event loop. + let mut state = runtime_state.borrow_mut(); + state.pending_ops.push(op_call); + state.have_unpolled_ops = true; None } diff --git a/ops/fast_call.rs b/ops/fast_call.rs index 2485b6083c..ebbb1927bc 100644 --- a/ops/fast_call.rs +++ b/ops/fast_call.rs @@ -245,41 +245,16 @@ pub(crate) fn generate( } if optimizer.is_async { - // Referenced variables are declared in parent block. - let track_async = q!({ - let __op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - }); - - output_transforms.push_tokens(&track_async); - let queue_future = if optimizer.returns_result { q!({ - let realm_idx = __ctx.realm_idx; - let __get_class = __state.get_error_class_fn; - let result = _ops::queue_fast_async_op(__ctx, async move { - let result = result.await; - ( - realm_idx, - __promise_id, - __op_id, - _ops::to_op_result(__get_class, result), - ) - }); + let result = _ops::queue_fast_async_op(__ctx, __promise_id, result); }) } else { q!({ - let realm_idx = __ctx.realm_idx; - let result = _ops::queue_fast_async_op(__ctx, async move { - let result = result.await; - ( - realm_idx, - __promise_id, - __op_id, - _ops::OpResult::Ok(result.into()), - ) - }); + let result = + _ops::queue_fast_async_op(__ctx, __promise_id, async move { + Ok(result.await) + }); }) }; diff --git a/ops/lib.rs b/ops/lib.rs index 7bf9620917..5a192537fd 100644 --- a/ops/lib.rs +++ b/ops/lib.rs @@ -258,32 +258,55 @@ fn codegen_v8_async( let (arg_decls, args_tail, _) = codegen_args(core, f, rust_i0, 1, asyncness); let type_params = exclude_lifetime_params(&f.sig.generics.params); - let (pre_result, mut result_fut) = match asyncness { - 
true => ( - quote! {}, - quote! { Self::call::<#type_params>(#args_head #args_tail).await; }, - ), - false => ( - quote! { let result_fut = Self::call::<#type_params>(#args_head #args_tail); }, - quote! { result_fut.await; }, - ), - }; - let result_wrapper = match is_result(&f.sig.output) { - true => { - // Support `Result> + 'static, AnyError>` - if !asyncness { - result_fut = quote! { result_fut; }; - quote! { - let result = match result { - Ok(fut) => fut.await, - Err(e) => return (realm_idx, promise_id, op_id, #core::_ops::to_op_result::<()>(get_class, Err(e))), - }; - } - } else { - quote! {} + let wrapper = match (asyncness, is_result(&f.sig.output)) { + (true, true) => { + quote! { + let fut = #core::_ops::map_async_op1(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (true, false) => { + quote! { + let fut = #core::_ops::map_async_op2(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (false, true) => { + quote! { + let fut = #core::_ops::map_async_op3(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); + } + } + (false, false) => { + quote! { + let fut = #core::_ops::map_async_op4(ctx, Self::call::<#type_params>(#args_head #args_tail)); + let maybe_response = #core::_ops::queue_async_op( + ctx, + scope, + #deferred, + promise_id, + fut, + ); } } - false => quote! { let result = Ok(result); }, }; quote! 
{ @@ -293,8 +316,6 @@ fn codegen_v8_async( &*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value() as *const #core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = #core::v8::Local::<#core::v8::Integer>::try_from(promise_id) @@ -310,20 +331,7 @@ fn codegen_v8_async( }; #arg_decls - - // Track async call & get copy of get_error_class_fn - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; - - #pre_result - let maybe_response = #core::_ops::queue_async_op(ctx, scope, #deferred, async move { - let result = #result_fut - #result_wrapper - (realm_idx, promise_id, op_id, #core::_ops::to_op_result(get_class, result)) - }); + #wrapper if let Some(response) = maybe_response { rv.set(response); diff --git a/ops/optimizer_tests/async_nop.out b/ops/optimizer_tests/async_nop.out index 7782b5970d..d267338258 100644 --- a/ops/optimizer_tests/async_nop.out +++ b/ops/optimizer_tests/async_nop.out @@ -56,8 +56,6 @@ impl op_void_async { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -74,25 +72,13 @@ impl op_void_async { return; } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op2(ctx, Self::call()); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call().await; - let result = Ok(result); - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); @@ -116,16 +102,10 @@ fn op_void_async_fast_fn<'scope>( 
}; let op_state = __ctx.state.clone(); let result = op_void_async::call(); - let __op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - let realm_idx = __ctx.realm_idx; let result = _ops::queue_fast_async_op( __ctx, - async move { - let result = result.await; - (realm_idx, __promise_id, __op_id, _ops::OpResult::Ok(result.into())) - }, + __promise_id, + async move { Ok(result.await) }, ); result } diff --git a/ops/optimizer_tests/async_result.out b/ops/optimizer_tests/async_result.out index c3bb433f1a..4494bf22ae 100644 --- a/ops/optimizer_tests/async_result.out +++ b/ops/optimizer_tests/async_result.out @@ -56,8 +56,6 @@ impl op_async_result { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -85,24 +83,16 @@ impl op_async_result { return deno_core::_ops::throw_type_error(scope, msg); } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call(ctx.state.clone(), arg_0), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call(ctx.state.clone(), arg_0).await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); @@ -127,16 +117,5 @@ fn op_async_result_fast_fn<'scope>( }; let state = __ctx.state.clone(); let result = op_async_result::call(state, rid); - let __op_id = __ctx.id; - let __state = ::std::cell::RefCell::borrow(&__ctx.state); - __state.tracker.track_async(__op_id); - let realm_idx = __ctx.realm_idx; - let __get_class = __state.get_error_class_fn; - let result = 
_ops::queue_fast_async_op( - __ctx, - async move { - let result = result.await; - (realm_idx, __promise_id, __op_id, _ops::to_op_result(__get_class, result)) - }, - ); + let result = _ops::queue_fast_async_op(__ctx, __promise_id, result); } diff --git a/ops/optimizer_tests/issue16934.out b/ops/optimizer_tests/issue16934.out index 68f59ef438..e92510038c 100644 --- a/ops/optimizer_tests/issue16934.out +++ b/ops/optimizer_tests/issue16934.out @@ -50,8 +50,6 @@ impl send_stdin { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -79,28 +77,19 @@ impl send_stdin { ); } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call( + compile_error!("mutable opstate is not supported in async ops"), + arg_0, + ), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call( - compile_error!("mutable opstate is not supported in async ops"), - arg_0, - ) - .await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); diff --git a/ops/optimizer_tests/issue16934_fast.out b/ops/optimizer_tests/issue16934_fast.out index 7a4a39f348..2a16d1b626 100644 --- a/ops/optimizer_tests/issue16934_fast.out +++ b/ops/optimizer_tests/issue16934_fast.out @@ -48,8 +48,6 @@ impl send_stdin { &*(deno_core::v8::Local::::cast(args.data()).value() as *const deno_core::_ops::OpCtx) }; - let op_id = ctx.id; - let realm_idx = ctx.realm_idx; let promise_id = args.get(0); let promise_id = deno_core::v8::Local::< deno_core::v8::Integer, @@ -77,28 +75,19 @@ impl send_stdin { return 
deno_core::_ops::throw_type_error(scope, msg); } }; - let get_class = { - let state = ::std::cell::RefCell::borrow(&ctx.state); - state.tracker.track_async(op_id); - state.get_error_class_fn - }; + let fut = deno_core::_ops::map_async_op1( + ctx, + Self::call( + compile_error!("mutable opstate is not supported in async ops"), + arg_0, + ), + ); let maybe_response = deno_core::_ops::queue_async_op( ctx, scope, false, - async move { - let result = Self::call( - compile_error!("mutable opstate is not supported in async ops"), - arg_0, - ) - .await; - ( - realm_idx, - promise_id, - op_id, - deno_core::_ops::to_op_result(get_class, result), - ) - }, + promise_id, + fut, ); if let Some(response) = maybe_response { rv.set(response); From c2f5c096925e2fc303f6ea1c36cdba38748c9217 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 26 Apr 2023 15:09:28 -0400 Subject: [PATCH 052/320] chore: fix benchmarks (#18863) --- cli/bench/websocket/deno_echo.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/bench/websocket/deno_echo.js b/cli/bench/websocket/deno_echo.js index 70e64dcbe5..1cb63eb7ff 100644 --- a/cli/bench/websocket/deno_echo.js +++ b/cli/bench/websocket/deno_echo.js @@ -22,4 +22,4 @@ function handler(request) { return response; } -serve(handler, { port: parseInt(port), hostname: "0.0.0.0" }); +serve({ port: parseInt(port), hostname: "0.0.0.0" }, handler); From 3d8a4d3b81e107bbb152ad69047f64d16ca800f3 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Wed, 26 Apr 2023 21:23:28 +0100 Subject: [PATCH 053/320] feat(cli): don't check permissions for statically analyzable dynamic imports (#18713) Closes #17697 Closes #17658 --- cli/cache/mod.rs | 17 +++++------------ cli/graph_util.rs | 11 +++-------- cli/lsp/testing/execution.rs | 1 - cli/module_loader.rs | 17 +++-------------- cli/tests/integration/run_tests.rs | 5 +++++ cli/tests/testdata/dynamic_import/empty_1.ts | 0 cli/tests/testdata/dynamic_import/empty_2.ts | 0 
.../dynamic_import/permissions_remote_remote.ts | 2 +- .../static_analysis_no_permissions.ts | 13 +++++++++++++ .../static_analysis_no_permissions.ts.out | 2 ++ .../run/error_015_dynamic_import_permissions.js | 2 +- .../error_015_dynamic_import_permissions.out | 2 +- cli/tests/testdata/workers/dynamic_remote.ts | 2 +- .../workers/permissions_dynamic_remote.ts.out | 2 +- cli/tools/bench.rs | 7 ++----- cli/tools/test.rs | 9 ++------- 16 files changed, 40 insertions(+), 52 deletions(-) create mode 100644 cli/tests/testdata/dynamic_import/empty_1.ts create mode 100644 cli/tests/testdata/dynamic_import/empty_2.ts create mode 100644 cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts create mode 100644 cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts.out diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index 24712d08ae..40d74ff66b 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -45,10 +45,9 @@ pub const CACHE_PERM: u32 = 0o644; /// a concise interface to the DENO_DIR when building module graphs. 
pub struct FetchCacher { emit_cache: EmitCache, - dynamic_permissions: PermissionsContainer, file_fetcher: Arc, file_header_overrides: HashMap>, - root_permissions: PermissionsContainer, + permissions: PermissionsContainer, cache_info_enabled: bool, maybe_local_node_modules_url: Option, } @@ -58,16 +57,14 @@ impl FetchCacher { emit_cache: EmitCache, file_fetcher: Arc, file_header_overrides: HashMap>, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, + permissions: PermissionsContainer, maybe_local_node_modules_url: Option, ) -> Self { Self { emit_cache, - dynamic_permissions, file_fetcher, file_header_overrides, - root_permissions, + permissions, cache_info_enabled: false, maybe_local_node_modules_url, } @@ -105,7 +102,7 @@ impl Loader for FetchCacher { fn load( &mut self, specifier: &ModuleSpecifier, - is_dynamic: bool, + _is_dynamic: bool, ) -> LoadFuture { if let Some(node_modules_url) = self.maybe_local_node_modules_url.as_ref() { // The specifier might be in a completely different symlinked tree than @@ -124,11 +121,7 @@ impl Loader for FetchCacher { } } - let permissions = if is_dynamic { - self.dynamic_permissions.clone() - } else { - self.root_permissions.clone() - }; + let permissions = self.permissions.clone(); let file_fetcher = self.file_fetcher.clone(); let file_header_overrides = self.file_header_overrides.clone(); let specifier = specifier.clone(); diff --git a/cli/graph_util.rs b/cli/graph_util.rs index d5bc6ac0d5..f9dafbb573 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -314,23 +314,18 @@ impl ModuleGraphBuilder { /// Creates the default loader used for creating a graph. 
pub fn create_graph_loader(&self) -> cache::FetchCacher { - self.create_fetch_cacher( - PermissionsContainer::allow_all(), - PermissionsContainer::allow_all(), - ) + self.create_fetch_cacher(PermissionsContainer::allow_all()) } pub fn create_fetch_cacher( &self, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, + permissions: PermissionsContainer, ) -> cache::FetchCacher { cache::FetchCacher::new( self.emit_cache.clone(), self.file_fetcher.clone(), self.options.resolve_file_header_overrides(), - root_permissions, - dynamic_permissions, + permissions, self.options.node_modules_dir_specifier(), ) } diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index 020dd5c08a..5e5a3788af 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -226,7 +226,6 @@ impl TestRun { Permissions::from_options(&ps.options.permissions_options())?; test::check_specifiers( &ps, - permissions.clone(), self .queue .iter() diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 9798983748..e4b8b616d7 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -109,15 +109,12 @@ impl ModuleLoadPreparer { roots: Vec, is_dynamic: bool, lib: TsTypeLib, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, + permissions: PermissionsContainer, ) -> Result<(), AnyError> { log::debug!("Preparing module load."); let _pb_clear_guard = self.progress_bar.clear_guard(); - let mut cache = self - .module_graph_builder - .create_fetch_cacher(root_permissions, dynamic_permissions); + let mut cache = self.module_graph_builder.create_fetch_cacher(permissions); let maybe_imports = self.options.to_maybe_imports()?; let graph_resolver = self.resolver.as_graph_resolver(); let graph_npm_resolver = self.resolver.as_graph_npm_resolver(); @@ -216,7 +213,6 @@ impl ModuleLoadPreparer { false, lib, PermissionsContainer::allow_all(), - PermissionsContainer::allow_all(), ) .await } @@ -537,7 +533,6 @@ 
impl ModuleLoader for CliModuleLoader { let specifier = specifier.clone(); let module_load_preparer = self.module_load_preparer.clone(); - let dynamic_permissions = self.dynamic_permissions.clone(); let root_permissions = if is_dynamic { self.dynamic_permissions.clone() } else { @@ -547,13 +542,7 @@ impl ModuleLoader for CliModuleLoader { async move { module_load_preparer - .prepare_module_load( - vec![specifier], - is_dynamic, - lib, - root_permissions, - dynamic_permissions, - ) + .prepare_module_load(vec![specifier], is_dynamic, lib, root_permissions) .await } .boxed_local() diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index cc37cf523a..d946b6a1c5 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -2661,6 +2661,11 @@ mod permissions { }); } + itest!(dynamic_import_static_analysis_no_permissions { + args: "run --quiet --reload --no-prompt dynamic_import/static_analysis_no_permissions.ts", + output: "dynamic_import/static_analysis_no_permissions.ts.out", + }); + itest!(dynamic_import_permissions_remote_remote { args: "run --quiet --reload --allow-net=localhost:4545 dynamic_import/permissions_remote_remote.ts", output: "dynamic_import/permissions_remote_remote.ts.out", diff --git a/cli/tests/testdata/dynamic_import/empty_1.ts b/cli/tests/testdata/dynamic_import/empty_1.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/cli/tests/testdata/dynamic_import/empty_2.ts b/cli/tests/testdata/dynamic_import/empty_2.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/cli/tests/testdata/dynamic_import/permissions_remote_remote.ts b/cli/tests/testdata/dynamic_import/permissions_remote_remote.ts index 0033bcccce..65a2541910 100644 --- a/cli/tests/testdata/dynamic_import/permissions_remote_remote.ts +++ b/cli/tests/testdata/dynamic_import/permissions_remote_remote.ts @@ -1,3 +1,3 @@ await import( - "http://localhost:4545/dynamic_import/static_remote.ts" + "" + 
"http://localhost:4545/dynamic_import/static_remote.ts" ); diff --git a/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts b/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts new file mode 100644 index 0000000000..de75ba87b6 --- /dev/null +++ b/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts @@ -0,0 +1,13 @@ +try { + await import("./empty_1.ts"); + console.log("✅ Succeeded importing statically analyzable specifier"); +} catch { + console.log("❌ Failed importing statically analyzable specifier"); +} + +try { + await import("" + "./empty_2.ts"); + console.log("❌ Succeeded importing non-statically analyzable specifier"); +} catch { + console.log("✅ Failed importing non-statically analyzable specifier"); +} diff --git a/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts.out b/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts.out new file mode 100644 index 0000000000..ba9249ab0b --- /dev/null +++ b/cli/tests/testdata/dynamic_import/static_analysis_no_permissions.ts.out @@ -0,0 +1,2 @@ +✅ Succeeded importing statically analyzable specifier +✅ Failed importing non-statically analyzable specifier diff --git a/cli/tests/testdata/run/error_015_dynamic_import_permissions.js b/cli/tests/testdata/run/error_015_dynamic_import_permissions.js index 73da56fd89..47961cf63b 100644 --- a/cli/tests/testdata/run/error_015_dynamic_import_permissions.js +++ b/cli/tests/testdata/run/error_015_dynamic_import_permissions.js @@ -1,3 +1,3 @@ (async () => { - await import("http://localhost:4545/subdir/mod4.js"); + await import("" + "http://localhost:4545/subdir/mod4.js"); })(); diff --git a/cli/tests/testdata/run/error_015_dynamic_import_permissions.out b/cli/tests/testdata/run/error_015_dynamic_import_permissions.out index ef54f331b0..87ce43e9cd 100644 --- a/cli/tests/testdata/run/error_015_dynamic_import_permissions.out +++ b/cli/tests/testdata/run/error_015_dynamic_import_permissions.out @@ -1,4 +1,4 @@ 
error: Uncaught (in promise) TypeError: Requires net access to "localhost:4545", run again with the --allow-net flag - await import("http://localhost:4545/subdir/mod4.js"); + await import("" + "http://localhost:4545/subdir/mod4.js"); ^ at async file://[WILDCARD]/error_015_dynamic_import_permissions.js:2:3 diff --git a/cli/tests/testdata/workers/dynamic_remote.ts b/cli/tests/testdata/workers/dynamic_remote.ts index 381c7f374c..54e4a4714e 100644 --- a/cli/tests/testdata/workers/dynamic_remote.ts +++ b/cli/tests/testdata/workers/dynamic_remote.ts @@ -1,2 +1,2 @@ // This file doesn't really exist, but it doesn't matter, a "PermissionsDenied" error should be thrown. -await import("https://example.com/some/file.ts"); +await import("" + "https://example.com/some/file.ts"); diff --git a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out index cd1884c7e6..91f3cc6d5b 100644 --- a/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out +++ b/cli/tests/testdata/workers/permissions_dynamic_remote.ts.out @@ -1,5 +1,5 @@ error: Uncaught (in worker "") (in promise) TypeError: Requires net access to "example.com", run again with the --allow-net flag -await import("https://example.com/some/file.ts"); +await import("" + "https://example.com/some/file.ts"); ^ at async http://localhost:4545/workers/dynamic_remote.ts:2:1 [WILDCARD]error: Uncaught (in promise) Error: Unhandled error in child worker. diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 9930bcc771..962b1ac174 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -418,7 +418,6 @@ impl BenchReporter for ConsoleReporter { /// Type check a collection of module and document specifiers. 
async fn check_specifiers( ps: &ProcState, - permissions: Permissions, specifiers: Vec, ) -> Result<(), AnyError> { let lib = ps.options.ts_type_lib_window(); @@ -428,10 +427,8 @@ async fn check_specifiers( false, lib, PermissionsContainer::allow_all(), - PermissionsContainer::new(permissions), ) .await?; - Ok(()) } @@ -654,7 +651,7 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers(&ps, permissions.clone(), specifiers.clone()).await?; + check_specifiers(&ps, specifiers.clone()).await?; if bench_options.no_run { return Ok(()); @@ -813,7 +810,7 @@ pub async fn run_benchmarks_with_watch( .filter(|specifier| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, permissions.clone(), specifiers.clone()).await?; + check_specifiers(&ps, specifiers.clone()).await?; if bench_options.no_run { return Ok(()); diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 17d1cebf32..268f3b4b9e 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -1230,7 +1230,6 @@ async fn fetch_inline_files( /// Type check a collection of module and document specifiers. 
pub async fn check_specifiers( ps: &ProcState, - permissions: Permissions, specifiers: Vec<(ModuleSpecifier, TestMode)>, ) -> Result<(), AnyError> { let lib = ps.options.ts_type_lib_window(); @@ -1265,7 +1264,6 @@ pub async fn check_specifiers( false, lib, PermissionsContainer::new(Permissions::allow_all()), - PermissionsContainer::new(permissions.clone()), ) .await?; } @@ -1287,7 +1285,6 @@ pub async fn check_specifiers( false, lib, PermissionsContainer::allow_all(), - PermissionsContainer::new(permissions), ) .await?; @@ -1648,8 +1645,7 @@ pub async fn run_tests( return Err(generic_error("No test modules found")); } - check_specifiers(&ps, permissions.clone(), specifiers_with_mode.clone()) - .await?; + check_specifiers(&ps, specifiers_with_mode.clone()).await?; if test_options.no_run { return Ok(()); @@ -1821,8 +1817,7 @@ pub async fn run_tests_with_watch( .filter(|(specifier, _)| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, permissions.clone(), specifiers_with_mode.clone()) - .await?; + check_specifiers(&ps, specifiers_with_mode.clone()).await?; if test_options.no_run { return Ok(()); From 7415aff983333ae45badfd43c3db35d39ad37b79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 26 Apr 2023 23:59:32 +0200 Subject: [PATCH 054/320] bench: fix more benchmarks (#18864) --- cli/bench/http.rs | 1 + cli/bench/http/deno_flash_send_file.js | 2 +- cli/bench/http/deno_http_serve.js | 2 +- cli/bench/http/deno_http_serve_https.js | 2 +- cli/bench/testdata/deno_upgrade_http.js | 2 +- 5 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cli/bench/http.rs b/cli/bench/http.rs index 4f8ab968ff..031e9801cc 100644 --- a/cli/bench/http.rs +++ b/cli/bench/http.rs @@ -99,6 +99,7 @@ pub fn benchmark( "run", "--allow-all", "--unstable", + "--enable-testing-features-do-not-use", path, &server_addr(port), ], diff --git a/cli/bench/http/deno_flash_send_file.js b/cli/bench/http/deno_flash_send_file.js index 
b613a61640..979b80bf46 100644 --- a/cli/bench/http/deno_flash_send_file.js +++ b/cli/bench/http/deno_flash_send_file.js @@ -11,4 +11,4 @@ function handler() { return new Response(file.readable); } -serve(handler, { hostname, port: Number(port) }); +serve({ hostname, port: Number(port) }, handler); diff --git a/cli/bench/http/deno_http_serve.js b/cli/bench/http/deno_http_serve.js index a0db62630b..989dc82e8a 100644 --- a/cli/bench/http/deno_http_serve.js +++ b/cli/bench/http/deno_http_serve.js @@ -8,4 +8,4 @@ function handler() { return new Response("Hello World"); } -serve(handler, { hostname, port, reusePort: true }); +serve({ hostname, port, reusePort: true }, handler); diff --git a/cli/bench/http/deno_http_serve_https.js b/cli/bench/http/deno_http_serve_https.js index cea659e093..17b4033945 100644 --- a/cli/bench/http/deno_http_serve_https.js +++ b/cli/bench/http/deno_http_serve_https.js @@ -15,4 +15,4 @@ function handler() { return new Response("Hello World"); } -serve(handler, { hostname, port, reusePort: true, cert: CERT, key: KEY }); +serve({ hostname, port, reusePort: true, cert: CERT, key: KEY }, handler); diff --git a/cli/bench/testdata/deno_upgrade_http.js b/cli/bench/testdata/deno_upgrade_http.js index ca55334113..a959846ce6 100644 --- a/cli/bench/testdata/deno_upgrade_http.js +++ b/cli/bench/testdata/deno_upgrade_http.js @@ -9,4 +9,4 @@ async function handler(req) { await conn.close(); } -serve(handler, { hostname: "127.0.0.1", port: 9000 }); +serve({ hostname: "127.0.0.1", port: 9000 }, handler); From a8b4e346b4477e340f36a59f83a0974afd541f4b Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 26 Apr 2023 18:53:13 -0400 Subject: [PATCH 055/320] refactor(ext/node): use a snapshottable global name for Node's globalThis (#18860) --- ext/node/analyze.rs | 11 +++++------ ext/node/build.rs | 10 ++++++++++ ext/node/lib.rs | 14 ++------------ 3 files changed, 17 insertions(+), 18 deletions(-) create mode 100644 ext/node/build.rs diff --git 
a/ext/node/analyze.rs b/ext/node/analyze.rs index c7181c4aca..2622ce8dab 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -341,7 +341,7 @@ fn esm_code_from_top_level_decls( } let mut result = String::new(); - let global_this_expr = NODE_GLOBAL_THIS_NAME.as_str(); + let global_this_expr = NODE_GLOBAL_THIS_NAME; let global_this_expr = if has_global_this { global_this_expr } else { @@ -506,10 +506,9 @@ mod tests { "export const x = 1;", &HashSet::from(["x".to_string()]), ); - assert!(r.contains(&format!( - "var globalThis = {};", - NODE_GLOBAL_THIS_NAME.as_str() - ))); + assert!( + r.contains(&format!("var globalThis = {};", NODE_GLOBAL_THIS_NAME,)) + ); assert!(r.contains("var process = globalThis.process;")); assert!(r.contains("export const x = 1;")); } @@ -533,7 +532,7 @@ mod tests { "var setTimeout = globalThis.setTimeout;\n", "export const x = 1;" ), - NODE_GLOBAL_THIS_NAME.as_str(), + NODE_GLOBAL_THIS_NAME, ) ); } diff --git a/ext/node/build.rs b/ext/node/build.rs new file mode 100644 index 0000000000..e9b960cab2 --- /dev/null +++ b/ext/node/build.rs @@ -0,0 +1,10 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +fn main() { + // we use a changing variable name to make it harder to depend on this + let crate_version = env!("CARGO_PKG_VERSION"); + println!( + "cargo:rustc-env=NODE_GLOBAL_THIS_NAME=__DENO_NODE_GLOBAL_THIS_{}__", + crate_version.replace('.', "_") + ); +} diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 84530423f3..cc4afb2b80 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -169,15 +169,7 @@ pub trait NpmResolver: std::fmt::Debug + Send + Sync { ) -> Result<(), AnyError>; } -pub static NODE_GLOBAL_THIS_NAME: Lazy = Lazy::new(|| { - let now = std::time::SystemTime::now(); - let seconds = now - .duration_since(std::time::SystemTime::UNIX_EPOCH) - .unwrap() - .as_secs(); - // use a changing variable name to make it hard to depend on this - format!("__DENO_NODE_GLOBAL_THIS_{seconds}__") -}); +pub const NODE_GLOBAL_THIS_NAME: &str = env!("NODE_GLOBAL_THIS_NAME"); pub static NODE_ENV_VAR_ALLOWLIST: Lazy> = Lazy::new(|| { // The full list of environment variables supported by Node.js is available @@ -557,9 +549,7 @@ pub fn initialize_runtime( argv0 ); }})('{}', {}, {});"#, - NODE_GLOBAL_THIS_NAME.as_str(), - uses_local_node_modules_dir, - argv0 + NODE_GLOBAL_THIS_NAME, uses_local_node_modules_dir, argv0 ); js_runtime.execute_script(located_script_name!(), source_code.into())?; From e2761df3fe2a457948948dcd38fb4f7e02cd350e Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Thu, 27 Apr 2023 00:58:18 +0200 Subject: [PATCH 056/320] fix(ext/http): internal upgradeHttpRaw works with "Deno.serve()" API (#18859) Fix internal "upgradeHttpRaw" API restoring capability to upgrade HTTP connection in polyfilles "node:http" API. 
--- cli/bench/testdata/deno_upgrade_http.js | 12 -- cli/tests/unit/serve_test.ts | 84 +++++++++++ cli/tsc/dts/lib.deno.unstable.d.ts | 19 --- ext/http/00_serve.js | 38 ++++- ext/http/01_http.js | 13 +- ext/http/http_next.rs | 135 ++++++++++++++++- ext/http/lib.rs | 190 ------------------------ ext/http/websocket_upgrade.rs | 17 ++- ext/node/polyfills/http.ts | 4 +- 9 files changed, 264 insertions(+), 248 deletions(-) delete mode 100644 cli/bench/testdata/deno_upgrade_http.js diff --git a/cli/bench/testdata/deno_upgrade_http.js b/cli/bench/testdata/deno_upgrade_http.js deleted file mode 100644 index a959846ce6..0000000000 --- a/cli/bench/testdata/deno_upgrade_http.js +++ /dev/null @@ -1,12 +0,0 @@ -const { serve, upgradeHttpRaw } = Deno; -const u8 = Deno[Deno.internal].core.encode( - "HTTP/1.1 101 Switching Protocols\r\n\r\n", -); - -async function handler(req) { - const [conn, _firstPacket] = upgradeHttpRaw(req); - await conn.write(u8); - await conn.close(); -} - -serve({ hostname: "127.0.0.1", port: 9000 }, handler); diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 6158f587e6..5d5d0428f9 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -17,6 +17,11 @@ import { } from "./test_util.ts"; import { consoleSize } from "../../../runtime/js/40_tty.js"; +const { + upgradeHttpRaw, + // @ts-expect-error TypeScript (as of 3.7) does not support indexing namespaces by symbol +} = Deno[Deno.internal]; + function createOnErrorCb(ac: AbortController): (err: unknown) => Response { return (err) => { console.error(err); @@ -803,6 +808,85 @@ Deno.test({ permissions: { net: true } }, async function httpServerWebSocket() { await server; }); +Deno.test( + { permissions: { net: true } }, + async function httpServerWebSocketRaw() { + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (request) => { + const { conn, response } = upgradeHttpRaw(request); + const 
buf = new Uint8Array(1024); + let read; + + // Write our fake HTTP upgrade + await conn.write( + new TextEncoder().encode( + "HTTP/1.1 101 Switching Protocols\r\nConnection: Upgraded\r\n\r\nExtra", + ), + ); + + // Upgrade data + read = await conn.read(buf); + assertEquals( + new TextDecoder().decode(buf.subarray(0, read!)), + "Upgrade data", + ); + // Read the packet to echo + read = await conn.read(buf); + // Echo + await conn.write(buf.subarray(0, read!)); + + conn.close(); + return response; + }, + port: 4501, + signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + + await listeningPromise; + + const conn = await Deno.connect({ port: 4501 }); + await conn.write( + new TextEncoder().encode( + "GET / HTTP/1.1\r\nConnection: Upgrade\r\nUpgrade: websocket\r\n\r\nUpgrade data", + ), + ); + const buf = new Uint8Array(1024); + let len; + + // Headers + let headers = ""; + for (let i = 0; i < 2; i++) { + len = await conn.read(buf); + headers += new TextDecoder().decode(buf.subarray(0, len!)); + if (headers.endsWith("Extra")) { + break; + } + } + assertMatch( + headers, + /HTTP\/1\.1 101 Switching Protocols[ ,.A-Za-z:0-9\r\n]*Extra/im, + ); + + // Data to echo + await conn.write(new TextEncoder().encode("buffer data")); + + // Echo + len = await conn.read(buf); + assertEquals( + new TextDecoder().decode(buf.subarray(0, len!)), + "buffer data", + ); + + conn.close(); + ac.abort(); + await server; + }, +); + Deno.test( { permissions: { net: true } }, async function httpServerWebSocketUpgradeTwice() { diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index dc3bfcfc01..f169e0254b 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1516,25 +1516,6 @@ declare namespace Deno { request: Request, ): Promise<[Deno.Conn, Uint8Array]>; - /** **UNSTABLE**: New API, yet to be vetted. - * - * Allows "hijacking" the connection that the request is associated with. 
- * This can be used to implement protocols that build on top of HTTP (eg. - * {@linkcode WebSocket}). - * - * Unlike {@linkcode Deno.upgradeHttp} this function does not require that you - * respond to the request with a {@linkcode Response} object. Instead this - * function returns the underlying connection and first packet received - * immediately, and then the caller is responsible for writing the response to - * the connection. - * - * This method can only be called on requests originating the - * {@linkcode Deno.serve} server. - * - * @category HTTP Server - */ - export function upgradeHttpRaw(request: Request): [Deno.Conn, Uint8Array]; - /** **UNSTABLE**: New API, yet to be vetted. * * Open a new {@linkcode Deno.Kv} connection to persist data. diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 8518e8d621..0b2c605388 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. const core = globalThis.Deno.core; const primordials = globalThis.__bootstrap.primordials; +const internals = globalThis.__bootstrap.internals; const { BadResourcePrototype } = core; import { InnerBody } from "ext:deno_fetch/22_body.js"; @@ -10,7 +11,7 @@ import { newInnerResponse, toInnerResponse, } from "ext:deno_fetch/23_response.js"; -import { fromInnerRequest } from "ext:deno_fetch/23_request.js"; +import { fromInnerRequest, toInnerRequest } from "ext:deno_fetch/23_request.js"; import { AbortController } from "ext:deno_web/03_abort_signal.js"; import { _eventLoop, @@ -32,6 +33,7 @@ import { readableStreamForRid, ReadableStreamPrototype, } from "ext:deno_web/06_streams.js"; +import { TcpConn } from "ext:deno_net/01_net.js"; const { ObjectPrototypeIsPrototypeOf, SafeSet, @@ -82,6 +84,14 @@ const UPGRADE_RESPONSE_SENTINEL = fromInnerResponse( "immutable", ); +function upgradeHttpRaw(req, conn) { + const inner = toInnerRequest(req); + if (inner._wantsUpgrade) { + return 
inner._wantsUpgrade("upgradeHttpRaw", conn); + } + throw new TypeError("upgradeHttpRaw may only be used with Deno.serve"); +} + class InnerRequest { #slabId; #context; @@ -122,10 +132,26 @@ class InnerRequest { throw "upgradeHttp is unavailable in Deno.serve at this time"; } - // upgradeHttpRaw is async - // TODO(mmastrac) + // upgradeHttpRaw is sync if (upgradeType == "upgradeHttpRaw") { - throw "upgradeHttp is unavailable in Deno.serve at this time"; + const slabId = this.#slabId; + const underlyingConn = originalArgs[0]; + + this.url(); + this.headerList; + this.close(); + + this.#upgraded = () => {}; + + const upgradeRid = core.ops.op_upgrade_raw(slabId); + + const conn = new TcpConn( + upgradeRid, + underlyingConn?.remoteAddr, + underlyingConn?.localAddr, + ); + + return { response: UPGRADE_RESPONSE_SENTINEL, conn }; } // upgradeWebSocket is sync @@ -623,4 +649,6 @@ async function serve(arg1, arg2) { } } -export { serve }; +internals.upgradeHttpRaw = upgradeHttpRaw; + +export { serve, upgradeHttpRaw }; diff --git a/ext/http/01_http.js b/ext/http/01_http.js index 95e2cee740..0048eedebb 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -64,7 +64,6 @@ const { } = primordials; const connErrorSymbol = Symbol("connError"); -const streamRid = Symbol("streamRid"); const _deferred = Symbol("upgradeHttpDeferred"); class HttpConn { @@ -482,16 +481,6 @@ function upgradeHttp(req) { return req[_deferred].promise; } -async function upgradeHttpRaw(req, tcpConn) { - const inner = toInnerRequest(req); - if (inner._wantsUpgrade) { - return inner._wantsUpgrade("upgradeHttpRaw", arguments); - } - - const res = await core.opAsync("op_http_upgrade_early", inner[streamRid]); - return new TcpConn(res, tcpConn.remoteAddr, tcpConn.localAddr); -} - const spaceCharCode = StringPrototypeCharCodeAt(" ", 0); const tabCharCode = StringPrototypeCharCodeAt("\t", 0); const commaCharCode = StringPrototypeCharCodeAt(",", 0); @@ -566,4 +555,4 @@ function 
buildCaseInsensitiveCommaValueFinder(checkText) { internals.buildCaseInsensitiveCommaValueFinder = buildCaseInsensitiveCommaValueFinder; -export { _ws, HttpConn, serve, upgradeHttp, upgradeHttpRaw, upgradeWebSocket }; +export { _ws, HttpConn, serve, upgradeHttp, upgradeWebSocket }; diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 1c2a232e20..593a9c8166 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -10,11 +10,13 @@ use crate::response_body::CompletionHandle; use crate::response_body::ResponseBytes; use crate::response_body::ResponseBytesInner; use crate::response_body::V8StreamHttpResponseBody; +use crate::websocket_upgrade::WebSocketUpgrade; use crate::LocalExecutor; use deno_core::error::AnyError; use deno_core::futures::TryFutureExt; use deno_core::op; use deno_core::AsyncRefCell; +use deno_core::AsyncResult; use deno_core::BufView; use deno_core::ByteString; use deno_core::CancelFuture; @@ -39,6 +41,7 @@ use hyper1::server::conn::http2; use hyper1::service::service_fn; use hyper1::service::HttpService; use hyper1::upgrade::OnUpgrade; + use hyper1::StatusCode; use pin_project::pin_project; use pin_project::pinned_drop; @@ -52,6 +55,10 @@ use std::net::SocketAddr; use std::net::SocketAddrV4; use std::pin::Pin; use std::rc::Rc; + +use tokio::io::AsyncReadExt; +use tokio::io::AsyncWriteExt; + use tokio::task::spawn_local; use tokio::task::JoinHandle; @@ -228,7 +235,79 @@ fn slab_insert( } #[op] -pub fn op_upgrade_raw(_index: usize) {} +pub fn op_upgrade_raw( + state: &mut OpState, + index: u32, +) -> Result { + // Stage 1: extract the upgrade future + let upgrade = with_http_mut(index, |http| { + // Manually perform the upgrade. 
We're peeking into hyper's underlying machinery here a bit + http + .request_parts + .extensions + .remove::() + .ok_or_else(|| AnyError::msg("upgrade unavailable")) + })?; + + let (read, write) = tokio::io::duplex(1024); + let (read_rx, write_tx) = tokio::io::split(read); + let (mut write_rx, mut read_tx) = tokio::io::split(write); + + spawn_local(async move { + let mut upgrade_stream = WebSocketUpgrade::::default(); + + // Stage 2: Extract the Upgraded connection + let mut buf = [0; 1024]; + let upgraded = loop { + let read = Pin::new(&mut write_rx).read(&mut buf).await?; + match upgrade_stream.write(&buf[..read]) { + Ok(None) => continue, + Ok(Some((response, bytes))) => { + with_resp_mut(index, |resp| *resp = Some(response)); + with_promise_mut(index, |promise| promise.complete(true)); + let mut upgraded = upgrade.await?; + upgraded.write_all(&bytes).await?; + break upgraded; + } + Err(err) => return Err(err), + } + }; + + // Stage 3: Pump the data + let (mut upgraded_rx, mut upgraded_tx) = tokio::io::split(upgraded); + + spawn_local(async move { + let mut buf = [0; 1024]; + loop { + let read = upgraded_rx.read(&mut buf).await?; + if read == 0 { + break; + } + read_tx.write_all(&buf[..read]).await?; + } + Ok::<_, AnyError>(()) + }); + spawn_local(async move { + let mut buf = [0; 1024]; + loop { + let read = write_rx.read(&mut buf).await?; + if read == 0 { + break; + } + upgraded_tx.write_all(&buf[..read]).await?; + } + Ok::<_, AnyError>(()) + }); + + Ok(()) + }); + + Ok( + state + .resource_table + .add(UpgradeStream::new(read_rx, write_tx)), + ) +} #[op] pub async fn op_upgrade( @@ -825,3 +904,57 @@ pub async fn op_http_wait( Ok(u32::MAX) } + +struct UpgradeStream { + read: AsyncRefCell>, + write: AsyncRefCell>, + cancel_handle: CancelHandle, +} + +impl UpgradeStream { + pub fn new( + read: tokio::io::ReadHalf, + write: tokio::io::WriteHalf, + ) -> Self { + Self { + read: AsyncRefCell::new(read), + write: AsyncRefCell::new(write), + cancel_handle: 
CancelHandle::new(), + } + } + + async fn read(self: Rc, buf: &mut [u8]) -> Result { + let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle); + async { + let read = RcRef::map(self, |this| &this.read); + let mut read = read.borrow_mut().await; + Ok(Pin::new(&mut *read).read(buf).await?) + } + .try_or_cancel(cancel_handle) + .await + } + + async fn write(self: Rc, buf: &[u8]) -> Result { + let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle); + async { + let write = RcRef::map(self, |this| &this.write); + let mut write = write.borrow_mut().await; + Ok(Pin::new(&mut *write).write(buf).await?) + } + .try_or_cancel(cancel_handle) + .await + } +} + +impl Resource for UpgradeStream { + fn name(&self) -> Cow { + "httpRawUpgradeStream".into() + } + + deno_core::impl_readable_byob!(); + deno_core::impl_writable!(); + + fn close(self: Rc) { + self.cancel_handle.cancel(); + } +} diff --git a/ext/http/lib.rs b/ext/http/lib.rs index d5404d189a..cde15af88c 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -32,7 +32,6 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::StringOrBuffer; -use deno_core::WriteOutcome; use deno_core::ZeroCopyBuf; use deno_net::raw::NetworkStream; use deno_websocket::ws_create_server_stream; @@ -67,11 +66,9 @@ use std::sync::Arc; use std::task::Context; use std::task::Poll; use tokio::io::AsyncRead; -use tokio::io::AsyncReadExt; use tokio::io::AsyncWrite; use tokio::io::AsyncWriteExt; use tokio::task::spawn_local; -use websocket_upgrade::WebSocketUpgrade; use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; @@ -97,7 +94,6 @@ deno_core::extension!( op_http_write_resource, op_http_shutdown, op_http_websocket_accept_header, - op_http_upgrade_early, op_http_upgrade_websocket, http_next::op_serve_http, http_next::op_serve_http_on, @@ -967,192 +963,6 @@ fn op_http_websocket_accept_header(key: String) -> Result { 
Ok(base64::encode(digest)) } -struct EarlyUpgradeSocket(AsyncRefCell, CancelHandle); - -enum EarlyUpgradeSocketInner { - PreResponse( - Rc, - WebSocketUpgrade, - // Readers need to block in this state, so they can wait here for the broadcast. - tokio::sync::broadcast::Sender< - Rc>>, - >, - ), - PostResponse( - Rc>>, - Rc>>, - ), -} - -impl EarlyUpgradeSocket { - /// Gets a reader without holding the lock. - async fn get_reader( - self: Rc, - ) -> Result< - Rc>>, - AnyError, - > { - let mut borrow = RcRef::map(self.clone(), |x| &x.0).borrow_mut().await; - let cancel = RcRef::map(self, |x| &x.1); - let inner = &mut *borrow; - match inner { - EarlyUpgradeSocketInner::PreResponse(_, _, tx) => { - let mut rx = tx.subscribe(); - // Ensure we're not borrowing self here - drop(borrow); - Ok( - rx.recv() - .map_err(AnyError::from) - .try_or_cancel(&cancel) - .await?, - ) - } - EarlyUpgradeSocketInner::PostResponse(rx, _) => Ok(rx.clone()), - } - } - - async fn read(self: Rc, data: &mut [u8]) -> Result { - let reader = self.clone().get_reader().await?; - let cancel = RcRef::map(self, |x| &x.1); - Ok( - reader - .borrow_mut() - .await - .read(data) - .try_or_cancel(&cancel) - .await?, - ) - } - - /// Write all the data provided, only holding the lock while we see if the connection needs to be - /// upgraded. - async fn write_all(self: Rc, buf: &[u8]) -> Result<(), AnyError> { - let mut borrow = RcRef::map(self.clone(), |x| &x.0).borrow_mut().await; - let cancel = RcRef::map(self, |x| &x.1); - let inner = &mut *borrow; - match inner { - EarlyUpgradeSocketInner::PreResponse(stream, upgrade, rx_tx) => { - if let Some((resp, extra)) = upgrade.write(buf)? 
{ - let new_wr = HttpResponseWriter::Closed; - let mut old_wr = - RcRef::map(stream.clone(), |r| &r.wr).borrow_mut().await; - let response_tx = match replace(&mut *old_wr, new_wr) { - HttpResponseWriter::Headers(response_tx) => response_tx, - _ => return Err(http_error("response headers already sent")), - }; - - if response_tx.send(resp).is_err() { - stream.conn.closed().await?; - return Err(http_error("connection closed while sending response")); - }; - - let mut old_rd = - RcRef::map(stream.clone(), |r| &r.rd).borrow_mut().await; - let new_rd = HttpRequestReader::Closed; - let upgraded = match replace(&mut *old_rd, new_rd) { - HttpRequestReader::Headers(request) => { - hyper::upgrade::on(request) - .map_err(AnyError::from) - .try_or_cancel(&cancel) - .await? - } - _ => { - return Err(http_error("response already started")); - } - }; - - let (rx, tx) = tokio::io::split(upgraded); - let rx = Rc::new(AsyncRefCell::new(rx)); - let tx = Rc::new(AsyncRefCell::new(tx)); - - // Take the tx and rx lock before we allow anything else to happen because we want to control - // the order of reads and writes. - let mut tx_lock = tx.clone().borrow_mut().await; - let rx_lock = rx.clone().borrow_mut().await; - - // Allow all the pending readers to go now. We still have the lock on inner, so no more - // pending readers can show up. We intentionally ignore errors here, as there may be - // nobody waiting on a read. - _ = rx_tx.send(rx.clone()); - - // We swap out inner here, so once the lock is gone, readers will acquire rx directly. - // We also fully release our lock. - *inner = EarlyUpgradeSocketInner::PostResponse(rx, tx); - drop(borrow); - - // We've updated inner and unlocked it, reads are free to go in-order. 
- drop(rx_lock); - - // If we had extra data after the response, write that to the upgraded connection - if !extra.is_empty() { - tx_lock.write_all(&extra).try_or_cancel(&cancel).await?; - } - } - } - EarlyUpgradeSocketInner::PostResponse(_, tx) => { - let tx = tx.clone(); - drop(borrow); - tx.borrow_mut() - .await - .write_all(buf) - .try_or_cancel(&cancel) - .await?; - } - }; - Ok(()) - } -} - -impl Resource for EarlyUpgradeSocket { - fn name(&self) -> Cow { - "upgradedHttpConnection".into() - } - - deno_core::impl_readable_byob!(); - - fn write( - self: Rc, - buf: BufView, - ) -> AsyncResult { - Box::pin(async move { - let nwritten = buf.len(); - Self::write_all(self, &buf).await?; - Ok(WriteOutcome::Full { nwritten }) - }) - } - - fn write_all(self: Rc, buf: BufView) -> AsyncResult<()> { - Box::pin(async move { Self::write_all(self, &buf).await }) - } - - fn close(self: Rc) { - self.1.cancel() - } -} - -#[op] -async fn op_http_upgrade_early( - state: Rc>, - rid: ResourceId, -) -> Result { - let stream = state - .borrow_mut() - .resource_table - .get::(rid)?; - let resources = &mut state.borrow_mut().resource_table; - let (tx, _rx) = tokio::sync::broadcast::channel(1); - let socket = EarlyUpgradeSocketInner::PreResponse( - stream, - WebSocketUpgrade::default(), - tx, - ); - let rid = resources.add(EarlyUpgradeSocket( - AsyncRefCell::new(socket), - CancelHandle::new(), - )); - Ok(rid) -} - #[op] async fn op_http_upgrade_websocket( state: Rc>, diff --git a/ext/http/websocket_upgrade.rs b/ext/http/websocket_upgrade.rs index 042a467219..70ad785267 100644 --- a/ext/http/websocket_upgrade.rs +++ b/ext/http/websocket_upgrade.rs @@ -1,12 +1,13 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::marker::PhantomData; + use bytes::Bytes; use bytes::BytesMut; use deno_core::error::AnyError; use httparse::Status; use hyper::http::HeaderName; use hyper::http::HeaderValue; -use hyper::Body; use hyper::Response; use memmem::Searcher; use memmem::TwoWaySearcher; @@ -15,14 +16,14 @@ use once_cell::sync::OnceCell; use crate::http_error; /// Given a buffer that ends in `\n\n` or `\r\n\r\n`, returns a parsed [`Request`]. -fn parse_response( +fn parse_response( header_bytes: &[u8], -) -> Result<(usize, Response), AnyError> { +) -> Result<(usize, Response), AnyError> { let mut headers = [httparse::EMPTY_HEADER; 16]; let status = httparse::parse_headers(header_bytes, &mut headers)?; match status { Status::Complete((index, parsed)) => { - let mut resp = Response::builder().status(101).body(Body::empty())?; + let mut resp = Response::builder().status(101).body(T::default())?; for header in parsed.iter() { resp.headers_mut().append( HeaderName::from_bytes(header.name.as_bytes())?, @@ -59,12 +60,13 @@ static HEADER_SEARCHER: OnceCell = OnceCell::new(); static HEADER_SEARCHER2: OnceCell = OnceCell::new(); #[derive(Default)] -pub struct WebSocketUpgrade { +pub struct WebSocketUpgrade { state: WebSocketUpgradeState, buf: BytesMut, + _t: PhantomData, } -impl WebSocketUpgrade { +impl WebSocketUpgrade { /// Ensures that the status line starts with "HTTP/1.1 101 " which matches all of the node.js /// WebSocket libraries that are known. We don't care about the trailing status text. 
fn validate_status(&self, status: &[u8]) -> Result<(), AnyError> { @@ -80,7 +82,7 @@ impl WebSocketUpgrade { pub fn write( &mut self, bytes: &[u8], - ) -> Result, Bytes)>, AnyError> { + ) -> Result, Bytes)>, AnyError> { use WebSocketUpgradeState::*; match self.state { @@ -153,6 +155,7 @@ impl WebSocketUpgrade { #[cfg(test)] mod tests { use super::*; + use hyper::Body; type ExpectedResponseAndHead = Option<(Response, &'static [u8])>; diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index d8ec7650bc..785bbaab3a 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -16,7 +16,7 @@ import { Agent } from "ext:deno_node/_http_agent.mjs"; import { chunkExpression as RE_TE_CHUNKED } from "ext:deno_node/_http_common.ts"; import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts"; -import * as denoHttp from "ext:deno_http/01_http.js"; +import { upgradeHttpRaw } from "ext:deno_http/00_serve.js"; import * as httpRuntime from "ext:runtime/40_http.js"; import { connResetException } from "ext:deno_node/internal/errors.ts"; @@ -704,7 +704,7 @@ class ServerImpl extends EventEmitter { } const req = new IncomingMessageForServer(reqEvent.request, tcpConn); if (req.upgrade && this.listenerCount("upgrade") > 0) { - const conn = await denoHttp.upgradeHttpRaw( + const conn = await upgradeHttpRaw( reqEvent.request, tcpConn, ) as Deno.Conn; From f4e442da4d948126e1db8cbb9905c5d140d94e1d Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 26 Apr 2023 19:15:25 -0400 Subject: [PATCH 057/320] fix(dts): `URLPatternComponentResult` groups should have possibly undefined key values (#18643) Closes #18640 --- cli/tests/unit/urlpattern_test.ts | 5 +++++ cli/tsc/dts/lib.dom.extras.d.ts | 2 +- ext/url/lib.deno_url.d.ts | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/urlpattern_test.ts b/cli/tests/unit/urlpattern_test.ts index 
9bed092355..cb5fc76c53 100644 --- a/cli/tests/unit/urlpattern_test.ts +++ b/cli/tests/unit/urlpattern_test.ts @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. import { assert, assertEquals } from "./test_util.ts"; +import { assertType, IsExact } from "../../../test_util/std/testing/types.ts"; Deno.test(function urlPatternFromString() { const pattern = new URLPattern("https://deno.land/foo/:bar"); @@ -13,6 +14,10 @@ Deno.test(function urlPatternFromString() { assert(match); assertEquals(match.pathname.input, "/foo/x"); assertEquals(match.pathname.groups, { bar: "x" }); + + // group values should be nullable + const val = match.pathname.groups.val; + assertType>(true); }); Deno.test(function urlPatternFromStringWithBase() { diff --git a/cli/tsc/dts/lib.dom.extras.d.ts b/cli/tsc/dts/lib.dom.extras.d.ts index 2c593b2cbe..9116596a6a 100644 --- a/cli/tsc/dts/lib.dom.extras.d.ts +++ b/cli/tsc/dts/lib.dom.extras.d.ts @@ -23,7 +23,7 @@ declare type URLPatternInput = string | URLPatternInit; declare interface URLPatternComponentResult { input: string; - groups: Record; + groups: Record; } /** `URLPatternResult` is the object returned from `URLPattern.exec`. */ diff --git a/ext/url/lib.deno_url.d.ts b/ext/url/lib.deno_url.d.ts index 1d5f840199..9a8c155d99 100644 --- a/ext/url/lib.deno_url.d.ts +++ b/ext/url/lib.deno_url.d.ts @@ -206,7 +206,7 @@ declare type URLPatternInput = string | URLPatternInit; /** @category Web APIs */ declare interface URLPatternComponentResult { input: string; - groups: Record; + groups: Record; } /** `URLPatternResult` is the object returned from `URLPattern.exec`. From 09b6dbc0a63cc55d0f65bd51416a04e577e08490 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 02:11:23 +0200 Subject: [PATCH 058/320] feat: Deprecate Deno.run API in favor of Deno.Command (#17630) (#18866) This commit adds `@deprecated` comments to `Deno.run` API declarations. 
Since stabilization of `Deno.Command` API in [Deno v1.31](https://deno.com/blog/v1.31#api-stabilizations), `Deno.Command` is the preferred (more reliable) API to interact with subprocesses. This is the preparation for the removal of `Deno.run` API in Deno 2.0. --- cli/tsc/dts/lib.deno.ns.d.ts | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 4d41aea436..74d3ffb0b4 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -3691,7 +3691,10 @@ declare namespace Deno { options?: { recursive: boolean }, ): FsWatcher; - /** Options which can be used with {@linkcode Deno.run}. + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * Options which can be used with {@linkcode Deno.run}. * * @category Sub Process */ export interface RunOptions { @@ -3749,7 +3752,10 @@ declare namespace Deno { stdin?: "inherit" | "piped" | "null" | number; } - /** The status resolved from the `.status()` method of a + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * The status resolved from the `.status()` method of a * {@linkcode Deno.Process} instance. * * If `success` is `true`, then `code` will be `0`, but if `success` is @@ -3769,6 +3775,8 @@ declare namespace Deno { }; /** + * * @deprecated Use {@linkcode Deno.Command} instead. + * * Represents an instance of a sub process that is returned from * {@linkcode Deno.run} which can be used to manage the sub-process. * @@ -3925,7 +3933,10 @@ declare namespace Deno { handler: () => void, ): void; - /** Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`, + /** + * @deprecated Use {@linkcode Deno.Command} instead. + * + * Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`, * an array of program arguments, the first of which is the binary. 
* * ```ts From 1054723a4b8bcbfe904980acd2193c0fb2458001 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 02:12:39 +0200 Subject: [PATCH 059/320] feat(serde_v8): better error output (#18815) The type that was received is now printed as part of a message. --- core/runtime.rs | 2 +- serde_v8/de.rs | 21 +++-- serde_v8/error.rs | 145 ++++++++++++++++++++++++----- serde_v8/magic/bigint.rs | 3 +- serde_v8/magic/bytestring.rs | 3 +- serde_v8/magic/detached_buffer.rs | 7 +- serde_v8/magic/external_pointer.rs | 4 +- serde_v8/magic/string_or_buffer.rs | 3 +- serde_v8/magic/u16string.rs | 3 +- serde_v8/magic/v8slice.rs | 4 +- serde_v8/tests/de.rs | 2 +- 11 files changed, 152 insertions(+), 45 deletions(-) diff --git a/core/runtime.rs b/core/runtime.rs index 3723a917ac..9ead489af7 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -4385,7 +4385,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { let sum = Deno.core.ops.op_sum_take(w32.subarray(0, 2)); return false; } catch(e) { - return e.message.includes('invalid type, expected: detachable'); + return e.message.includes('invalid type; expected: detachable'); } }); if (!assertWasmThrow()) { diff --git a/serde_v8/de.rs b/serde_v8/de.rs index d593ffbc56..edb1263ebb 100644 --- a/serde_v8/de.rs +++ b/serde_v8/de.rs @@ -4,6 +4,7 @@ use serde::de::Visitor; use serde::de::{self}; use serde::Deserialize; +use crate::error::value_to_type_str; use crate::error::Error; use crate::error::Result; use crate::keys::v8_struct_key; @@ -84,7 +85,7 @@ macro_rules! deserialize_signed { } else if let Some(x) = self.input.to_big_int(self.scope) { x.i64_value().0 as $t } else { - return Err(Error::ExpectedInteger); + return Err(Error::ExpectedInteger(value_to_type_str(self.input))); }, ) } @@ -107,7 +108,7 @@ macro_rules! 
deserialize_unsigned { } else if let Some(x) = self.input.to_big_int(self.scope) { x.u64_value().0 as $t } else { - return Err(Error::ExpectedInteger); + return Err(Error::ExpectedInteger(value_to_type_str(self.input))); }, ) } @@ -188,7 +189,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> } else if let Some(x) = self.input.to_big_int(self.scope) { bigint_to_f64(x) } else { - return Err(Error::ExpectedNumber); + return Err(Error::ExpectedNumber(value_to_type_str(self.input))); }, ) } @@ -216,7 +217,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> let string = to_utf8(v8_string, self.scope); visitor.visit_string(string) } else { - Err(Error::ExpectedString) + Err(Error::ExpectedString(value_to_type_str(self.input))) } } @@ -268,7 +269,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> V: Visitor<'de>, { let arr = v8::Local::::try_from(self.input) - .map_err(|_| Error::ExpectedArray)?; + .map_err(|_| Error::ExpectedArray(value_to_type_str(self.input)))?; visitor.visit_seq(SeqAccess::new(arr.into(), self.scope, 0..arr.length())) } @@ -308,7 +309,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> { // Assume object, then get_own_property_names let obj = v8::Local::::try_from(self.input) - .map_err(|_| Error::ExpectedObject)?; + .map_err(|_| Error::ExpectedObject(value_to_type_str(self.input)))?; if v8::Local::::try_from(self.input).is_ok() { let pairs_array = v8::Local::::try_from(self.input) @@ -363,7 +364,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> _ => { // Regular struct let obj = v8::Local::::try_from(self.input) - .or(Err(Error::ExpectedObject))?; + .map_err(|_| Error::ExpectedObject(value_to_type_str(self.input)))?; // Fields names are a hint and must be inferred when not provided if fields.is_empty() { @@ -409,7 +410,8 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> let tag = { let prop_names = obj.get_own_property_names(self.scope, Default::default()); - let prop_names = prop_names.ok_or(Error::ExpectedEnum)?; + let prop_names = 
prop_names + .ok_or_else(|| Error::ExpectedEnum(value_to_type_str(self.input)))?; let prop_names_len = prop_names.length(); if prop_names_len != 1 { return Err(Error::LengthMismatch(prop_names_len as usize, 1)); @@ -424,8 +426,7 @@ impl<'de, 'a, 'b, 's, 'x> de::Deserializer<'de> payload, }) } else { - // TODO: improve error - Err(Error::ExpectedEnum) + Err(Error::ExpectedEnum(value_to_type_str(self.input))) } } diff --git a/serde_v8/error.rs b/serde_v8/error.rs index 94ac3c0a54..aa2d92bf8e 100644 --- a/serde_v8/error.rs +++ b/serde_v8/error.rs @@ -9,30 +9,41 @@ pub enum Error { #[error("{0}")] Message(String), - #[error("serde_v8 error: invalid type, expected: boolean")] - ExpectedBoolean, - #[error("serde_v8 error: invalid type, expected: integer")] - ExpectedInteger, - #[error("serde_v8 error: invalid type, expected: number")] - ExpectedNumber, - #[error("serde_v8 error: invalid type, expected: string")] - ExpectedString, - #[error("serde_v8 error: invalid type, expected: array")] - ExpectedArray, - #[error("serde_v8 error: invalid type, expected: map")] - ExpectedMap, - #[error("serde_v8 error: invalid type, expected: enum")] - ExpectedEnum, - #[error("serde_v8 error: invalid type, expected: object")] - ExpectedObject, - #[error("serde_v8 error: invalid type, expected: buffer")] - ExpectedBuffer, - #[error("serde_v8 error: invalid type, expected: detachable")] - ExpectedDetachable, - #[error("serde_v8 error: invalid type, expected: external")] - ExpectedExternal, - #[error("serde_v8 error: invalid type, expected: bigint")] - ExpectedBigInt, + #[error("serde_v8 error: invalid type; expected: boolean, got: {0}")] + ExpectedBoolean(&'static str), + + #[error("serde_v8 error: invalid type; expected: integer, got: {0}")] + ExpectedInteger(&'static str), + + #[error("serde_v8 error: invalid type; expected: number, got: {0}")] + ExpectedNumber(&'static str), + + #[error("serde_v8 error: invalid type; expected: string, got: {0}")] + ExpectedString(&'static str), + + 
#[error("serde_v8 error: invalid type; expected: array, got: {0}")] + ExpectedArray(&'static str), + + #[error("serde_v8 error: invalid type; expected: map, got: {0}")] + ExpectedMap(&'static str), + + #[error("serde_v8 error: invalid type; expected: enum, got: {0}")] + ExpectedEnum(&'static str), + + #[error("serde_v8 error: invalid type; expected: object, got: {0}")] + ExpectedObject(&'static str), + + #[error("serde_v8 error: invalid type; expected: buffer, got: {0}")] + ExpectedBuffer(&'static str), + + #[error("serde_v8 error: invalid type; expected: detachable, got: {0}")] + ExpectedDetachable(&'static str), + + #[error("serde_v8 error: invalid type; expected: external, got: {0}")] + ExpectedExternal(&'static str), + + #[error("serde_v8 error: invalid type; expected: bigint, got: {0}")] + ExpectedBigInt(&'static str), #[error("serde_v8 error: invalid type, expected: utf8")] ExpectedUtf8, @@ -57,3 +68,89 @@ impl serde::de::Error for Error { Error::Message(msg.to_string()) } } + +pub(crate) fn value_to_type_str(value: v8::Local) -> &'static str { + if value.is_module_namespace_object() { + "Module" + } else if value.is_wasm_module_object() { + "WASM module" + } else if value.is_wasm_memory_object() { + "WASM memory object" + } else if value.is_proxy() { + "Proxy" + } else if value.is_shared_array_buffer() { + "SharedArrayBuffer" + } else if value.is_data_view() { + "DataView" + } else if value.is_big_uint64_array() { + "BigUint64Array" + } else if value.is_big_int64_array() { + "BigInt64Array" + } else if value.is_float64_array() { + "Float64Array" + } else if value.is_float32_array() { + "Float32Array" + } else if value.is_int32_array() { + "Int32Array" + } else if value.is_uint32_array() { + "Uint32Array" + } else if value.is_int16_array() { + "Int16Array" + } else if value.is_uint16_array() { + "Uint16Array" + } else if value.is_int8_array() { + "Int8Array" + } else if value.is_uint8_clamped_array() { + "Uint8ClampedArray" + } else if value.is_uint8_array() 
{ + "Uint8Array" + } else if value.is_typed_array() { + "TypedArray" + } else if value.is_array_buffer_view() { + "ArrayBufferView" + } else if value.is_array_buffer() { + "ArrayBuffer" + } else if value.is_weak_set() { + "WeakSet" + } else if value.is_weak_map() { + "WeakMap" + } else if value.is_set_iterator() { + "Set Iterator" + } else if value.is_map_iterator() { + "Map Iterator" + } else if value.is_set() { + "Set" + } else if value.is_map() { + "Map" + } else if value.is_promise() { + "Promise" + } else if value.is_generator_function() { + "Generator function" + } else if value.is_async_function() { + "Async function" + } else if value.is_reg_exp() { + "RegExp" + } else if value.is_date() { + "Date" + } else if value.is_number() { + "Number" + } else if value.is_boolean() { + "Boolean" + } else if value.is_big_int() { + "bigint" + } else if value.is_array() { + "array" + } else if value.is_function() { + "function" + } else if value.is_symbol() { + "symbol" + } else if value.is_string() { + "string" + } else if value.is_null() { + "null" + } else if value.is_undefined() { + "undefined" + } else { + "unknown" + } +} diff --git a/serde_v8/magic/bigint.rs b/serde_v8/magic/bigint.rs index 69828747ff..330803daf8 100644 --- a/serde_v8/magic/bigint.rs +++ b/serde_v8/magic/bigint.rs @@ -5,6 +5,7 @@ use smallvec::SmallVec; use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; @@ -42,7 +43,7 @@ impl FromV8 for BigInt { value: v8::Local, ) -> Result { let v8bigint = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedBigInt)?; + .map_err(|_| Error::ExpectedBigInt(value_to_type_str(value)))?; let word_count = v8bigint.word_count(); let mut words: SmallVec<[u64; 1]> = smallvec![0u64; word_count]; let (sign_bit, _words) = v8bigint.to_words_array(&mut words); diff --git a/serde_v8/magic/bytestring.rs b/serde_v8/magic/bytestring.rs index 77771698f5..3baa704e5f 100644 --- 
a/serde_v8/magic/bytestring.rs +++ b/serde_v8/magic/bytestring.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; use smallvec::SmallVec; @@ -49,7 +50,7 @@ impl FromV8 for ByteString { value: v8::Local, ) -> Result { let v8str = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedString)?; + .map_err(|_| Error::ExpectedString(value_to_type_str(value)))?; if !v8str.contains_only_onebyte() { return Err(Error::ExpectedLatin1); } diff --git a/serde_v8/magic/detached_buffer.rs b/serde_v8/magic/detached_buffer.rs index 7ee4dfb214..bc4b3de677 100644 --- a/serde_v8/magic/detached_buffer.rs +++ b/serde_v8/magic/detached_buffer.rs @@ -8,6 +8,7 @@ use super::transl8::FromV8; use super::transl8::ToV8; use super::v8slice::to_ranged_buffer; use super::v8slice::V8Slice; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; // A buffer that detaches when deserialized from JS @@ -57,10 +58,10 @@ impl FromV8 for DetachedBuffer { scope: &mut v8::HandleScope, value: v8::Local, ) -> Result { - let (b, range) = - to_ranged_buffer(scope, value).or(Err(crate::Error::ExpectedBuffer))?; + let (b, range) = to_ranged_buffer(scope, value) + .map_err(|_| crate::Error::ExpectedBuffer(value_to_type_str(value)))?; if !b.is_detachable() { - return Err(crate::Error::ExpectedDetachable); + return Err(crate::Error::ExpectedDetachable(value_to_type_str(value))); } let store = b.get_backing_store(); b.detach(None); // Detach diff --git a/serde_v8/magic/external_pointer.rs b/serde_v8/magic/external_pointer.rs index fca6028d67..e22e41a010 100644 --- a/serde_v8/magic/external_pointer.rs +++ b/serde_v8/magic/external_pointer.rs @@ -2,6 +2,8 @@ use std::ffi::c_void; +use crate::error::value_to_type_str; + use super::transl8::impl_magic; use super::transl8::FromV8; use super::transl8::ToV8; 
@@ -38,7 +40,7 @@ impl FromV8 for ExternalPointer { } else if let Ok(external) = v8::Local::::try_from(value) { Ok(ExternalPointer(external.value())) } else { - Err(crate::Error::ExpectedExternal) + Err(crate::Error::ExpectedExternal(value_to_type_str(value))) } } } diff --git a/serde_v8/magic/string_or_buffer.rs b/serde_v8/magic/string_or_buffer.rs index 04ce08be22..8b1a06dbc2 100644 --- a/serde_v8/magic/string_or_buffer.rs +++ b/serde_v8/magic/string_or_buffer.rs @@ -2,6 +2,7 @@ use super::buffer::ZeroCopyBuf; use super::transl8::FromV8; use super::transl8::ToV8; +use crate::error::value_to_type_str; use crate::magic::transl8::impl_magic; use crate::Error; use std::ops::Deref; @@ -73,7 +74,7 @@ impl FromV8 for StringOrBuffer { } else if let Ok(s) = crate::from_v8(scope, value) { return Ok(Self::String(s)); } - Err(Error::ExpectedBuffer) + Err(Error::ExpectedBuffer(value_to_type_str(value))) } } diff --git a/serde_v8/magic/u16string.rs b/serde_v8/magic/u16string.rs index 57e3fd0f66..04d742da96 100644 --- a/serde_v8/magic/u16string.rs +++ b/serde_v8/magic/u16string.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::error::value_to_type_str; use crate::Error; use super::transl8::impl_magic; @@ -37,7 +38,7 @@ impl FromV8 for U16String { value: v8::Local, ) -> Result { let v8str = v8::Local::::try_from(value) - .map_err(|_| Error::ExpectedString)?; + .map_err(|_| Error::ExpectedString(value_to_type_str(value)))?; let len = v8str.length(); let mut buffer = Vec::with_capacity(len); #[allow(clippy::uninit_vec)] diff --git a/serde_v8/magic/v8slice.rs b/serde_v8/magic/v8slice.rs index 384ccf5c52..073e752355 100644 --- a/serde_v8/magic/v8slice.rs +++ b/serde_v8/magic/v8slice.rs @@ -5,6 +5,8 @@ use std::ops::DerefMut; use std::ops::Range; use std::rc::Rc; +use crate::error::value_to_type_str; + use super::rawbytes; use super::transl8::FromV8; @@ -91,7 +93,7 @@ impl FromV8 for V8Slice { ) -> Result { to_ranged_buffer(scope, value) .and_then(|(b, r)| Self::from_buffer(b, r)) - .map_err(|_| crate::Error::ExpectedBuffer) + .map_err(|_| crate::Error::ExpectedBuffer(value_to_type_str(value))) } } diff --git a/serde_v8/tests/de.rs b/serde_v8/tests/de.rs index eae30f5404..4e5e1e4b99 100644 --- a/serde_v8/tests/de.rs +++ b/serde_v8/tests/de.rs @@ -408,7 +408,7 @@ detest!( ); defail!(defail_struct, MathOp, "123", |e| e - == Err(Error::ExpectedObject)); + == Err(Error::ExpectedObject("Number"))); #[derive(Eq, PartialEq, Debug, Deserialize)] pub struct SomeThing { From a16ad526e94304063f8efba710503bfd288d0248 Mon Sep 17 00:00:00 2001 From: Kevin Whinnery Date: Wed, 26 Apr 2023 20:49:59 -0400 Subject: [PATCH 060/320] docs: Improve inline docs for permissions (deno run --help) (#18757) Hey there! I took a crack at improving these embedded docs [as requested here](https://github.com/denoland/deno/issues/18685). These should accurately reflect the functionality of the permission-related flags for `deno run`. 
### Highlights * Adds human-readable argument string in the format [prescribed in the docs](https://docs.rs/clap/latest/clap/struct.Arg.html#method.value_name) * Keeps text description terse, but includes a relevant copy/pasteable docs link * Includes example argument usage/formatting --- cli/args/flags.rs | 109 ++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 100 insertions(+), 9 deletions(-) diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 0efaa5ea3d..3d88cda913 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -1882,6 +1882,90 @@ fn compile_args_without_check_args(app: Command) -> Command { .arg(ca_file_arg()) } +static ALLOW_READ_HELP: &str = concat!( + "Allow file system read access. Optionally specify allowed paths.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-read\n", + " --allow-read=\"/etc,/var/log.txt\"" +); + +static ALLOW_WRITE_HELP: &str = concat!( + "Allow file system write access. Optionally specify allowed paths.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-write\n", + " --allow-write=\"/etc,/var/log.txt\"" +); + +static ALLOW_NET_HELP: &str = concat!( + "Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-net\n", + " --allow-net=\"localhost:8080,deno.land\"" +); + +static ALLOW_ENV_HELP: &str = concat!( + "Allow access to system environment information. Optionally specify accessible environment variables.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-env\n", + " --allow-env=\"PORT,HOME,PATH\"" +); + +static ALLOW_SYS_HELP: &str = concat!( + "Allow access to OS information. 
Optionally allow specific APIs by function name.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-sys\n", + " --allow-sys=\"systemMemoryInfo,osRelease\"" +); + +static ALLOW_RUN_HELP: &str = concat!( + "Allow running subprocesses. Optionally specify allowed runnable program names.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-run\n", + " --allow-run=\"whoami,ps\"" +); + +static ALLOW_FFI_HELP: &str = concat!( + "(Unstable) Allow loading dynamic libraries. Optionally specify allowed directories or files.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n", + "Examples:\n", + " --allow-ffi\n", + " --allow-ffi=\"./libfoo.so\"" +); + +static ALLOW_HRTIME_HELP: &str = concat!( + "Allow high-resolution time measurement. Note: this can enable timing attacks and fingerprinting.\n", + "Docs: https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n" +); + +static ALLOW_ALL_HELP: &str = concat!( + "Allow all permissions. Learn more about permissions in Deno:\n", + "https://deno.land/manual@v", + env!("CARGO_PKG_VERSION"), + "/basics/permissions\n" +); + fn permission_args(app: Command) -> Command { app .arg( @@ -1890,7 +1974,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) .use_value_delimiter(true) .require_equals(true) - .help("Allow file system read access") + .value_name("PATH") + .help(ALLOW_READ_HELP) .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -1900,7 +1985,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) 
.use_value_delimiter(true) .require_equals(true) - .help("Allow file system write access") + .value_name("PATH") + .help(ALLOW_WRITE_HELP) .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -1910,7 +1996,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) .use_value_delimiter(true) .require_equals(true) - .help("Allow network access") + .value_name("IP_OR_HOSTNAME") + .help(ALLOW_NET_HELP) .value_parser(flags_allow_net::validator), ) .arg(unsafely_ignore_certificate_errors_arg()) @@ -1920,7 +2007,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) .use_value_delimiter(true) .require_equals(true) - .help("Allow environment access") + .value_name("VARIABLE_NAME") + .help(ALLOW_ENV_HELP) .value_parser(|key: &str| { if key.is_empty() || key.contains(&['=', '\0'] as &[char]) { return Err(format!("invalid key \"{key}\"")); @@ -1939,7 +2027,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) .use_value_delimiter(true) .require_equals(true) - .help("Allow access to system info") + .value_name("API_NAME") + .help(ALLOW_SYS_HELP) .value_parser(|key: &str| parse_sys_kind(key).map(ToString::to_string)), ) .arg( @@ -1948,7 +2037,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) .use_value_delimiter(true) .require_equals(true) - .help("Allow running subprocesses"), + .value_name("PROGRAM_NAME") + .help(ALLOW_RUN_HELP), ) .arg( Arg::new("allow-ffi") @@ -1956,7 +2046,8 @@ fn permission_args(app: Command) -> Command { .num_args(0..) 
.use_value_delimiter(true) .require_equals(true) - .help("Allow loading dynamic libraries") + .value_name("PATH") + .help(ALLOW_FFI_HELP) .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -1964,14 +2055,14 @@ fn permission_args(app: Command) -> Command { Arg::new("allow-hrtime") .long("allow-hrtime") .action(ArgAction::SetTrue) - .help("Allow high resolution time measurement"), + .help(ALLOW_HRTIME_HELP), ) .arg( Arg::new("allow-all") .short('A') .long("allow-all") .action(ArgAction::SetTrue) - .help("Allow all permissions"), + .help(ALLOW_ALL_HELP), ) .arg( Arg::new("prompt") From 4192978c3afc943b93d9fae0f65822a2c4edfa62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 04:52:52 +0200 Subject: [PATCH 061/320] feat(lint): add `Deno.run` to `no-deprecated-deno-api` (#18869) This upgrade includes a warning for the deprecated "Deno.run()" API. --------- Co-authored-by: David Sherret --- Cargo.lock | 4 +-- cli/Cargo.toml | 2 +- cli/tests/testdata/coverage/complex_test.ts | 2 ++ cli/tests/testdata/test/captured_output.ts | 1 + cli/tests/unit/http_test.ts | 2 ++ cli/tests/unit/process_test.ts | 29 +++++++++++++++++++++ third_party | 2 +- 7 files changed, 38 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d515dbd72d..352ca75106 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1062,9 +1062,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.44.0" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8314e893e86e7f66cf06926d684a5d8708d737a28056472c9d7d78ef1c00691b" +checksum = "3867178bfb6579aaf9ed79599d3181d134f13dfcd38fdd93cae7d53a37bece8d" dependencies = [ "anyhow", "deno_ast", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a806b70934..a75fb2dcef 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -45,7 +45,7 @@ deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] deno_doc = "0.62.0" deno_emit = 
"0.20.0" deno_graph = "=0.48.1" -deno_lint = { version = "0.44.0", features = ["docs"] } +deno_lint = { version = "0.45.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] } diff --git a/cli/tests/testdata/coverage/complex_test.ts b/cli/tests/testdata/coverage/complex_test.ts index 1202289cb5..d6e9c26910 100644 --- a/cli/tests/testdata/coverage/complex_test.ts +++ b/cli/tests/testdata/coverage/complex_test.ts @@ -7,6 +7,7 @@ Deno.test("complex", function () { Deno.test("sub process with stdin", async () => { // ensure launching deno run with stdin doesn't affect coverage const code = "console.log('5')"; + // deno-lint-ignore no-deprecated-deno-api const p = await Deno.run({ cmd: [Deno.execPath(), "run", "-"], stdin: "piped", @@ -25,6 +26,7 @@ Deno.test("sub process with stdin", async () => { Deno.test("sub process with deno eval", async () => { // ensure launching deno eval doesn't affect coverage const code = "console.log('5')"; + // deno-lint-ignore no-deprecated-deno-api const p = await Deno.run({ cmd: [Deno.execPath(), "eval", code], stdout: "piped", diff --git a/cli/tests/testdata/test/captured_output.ts b/cli/tests/testdata/test/captured_output.ts index 43295f027b..905156fd41 100644 --- a/cli/tests/testdata/test/captured_output.ts +++ b/cli/tests/testdata/test/captured_output.ts @@ -1,4 +1,5 @@ Deno.test("output", async () => { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "console.log(0); console.error(1);"], }); diff --git a/cli/tests/unit/http_test.ts b/cli/tests/unit/http_test.ts index f407c9186e..d9d1655fa3 100644 --- a/cli/tests/unit/http_test.ts +++ b/cli/tests/unit/http_test.ts @@ -2085,6 +2085,7 @@ Deno.test({ "--header", "Accept-Encoding: deflate, gzip", ]; + // deno-lint-ignore no-deprecated-deno-api const proc = Deno.run({ cmd, stdout: "piped", stderr: 
"null" }); const status = await proc.status(); assert(status.success); @@ -2147,6 +2148,7 @@ Deno.test({ "--header", "Accept-Encoding: deflate, gzip", ]; + // deno-lint-ignore no-deprecated-deno-api const proc = Deno.run({ cmd, stdout: "piped", stderr: "null" }); const status = await proc.status(); assert(status.success); diff --git a/cli/tests/unit/process_test.ts b/cli/tests/unit/process_test.ts index e6c4bfe595..54ebb07b22 100644 --- a/cli/tests/unit/process_test.ts +++ b/cli/tests/unit/process_test.ts @@ -11,6 +11,7 @@ Deno.test( { permissions: { read: true, run: false } }, function runPermissions() { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], }); @@ -21,6 +22,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runSuccess() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ // freeze the array to ensure it's not modified cmd: Object.freeze([ @@ -43,6 +45,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runUrl() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ new URL(`file:///${Deno.execPath()}`), @@ -66,6 +69,7 @@ Deno.test( async function runStdinRid0(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], stdin: 0, @@ -85,6 +89,7 @@ Deno.test( { permissions: { run: true, read: true } }, function runInvalidStdio() { assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stdin. @@ -92,6 +97,7 @@ Deno.test( }) ); assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stdout. 
@@ -99,6 +105,7 @@ Deno.test( }) ); assertThrows(() => + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"], // @ts-expect-error because Deno.run should throw on invalid stderr. @@ -111,6 +118,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runCommandFailedWithCode() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "Deno.exit(41 + 1)"], }); @@ -127,6 +135,7 @@ Deno.test( permissions: { run: true, read: true }, }, async function runCommandFailedWithSignal() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -150,6 +159,7 @@ Deno.test( Deno.test({ permissions: { run: true } }, function runNotFound() { let error; try { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: ["this file hopefully doesn't exist"] }); } catch (e) { error = e; @@ -181,6 +191,7 @@ tryExit(); `; Deno.writeFileSync(`${cwd}/${programFile}`, enc.encode(program)); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cwd, cmd: [Deno.execPath(), "run", "--allow-read", programFile], @@ -204,6 +215,7 @@ Deno.test( async function runStdinPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -235,6 +247,7 @@ Deno.test( async function runStdoutPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -271,6 +284,7 @@ Deno.test( async function runStderrPiped(): Promise< void > { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -305,6 +319,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runOutput() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -325,6 +340,7 @@ Deno.test( async function runStderrOutput(): Promise< void > { + // 
deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -350,6 +366,7 @@ Deno.test( write: true, }); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -382,6 +399,7 @@ Deno.test( await Deno.writeFile(fileName, encoder.encode("hello")); const file = await Deno.open(fileName); + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -401,6 +419,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runEnv() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -423,6 +442,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runClose() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -446,6 +466,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function runKillAfterStatus() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", 'console.log("hello")'], }); @@ -502,6 +523,7 @@ Deno.test( Deno.test( { permissions: { run: true, read: true } }, async function killSuccess() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], }); @@ -525,6 +547,7 @@ Deno.test( ); Deno.test({ permissions: { run: true, read: true } }, function killFailed() { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], }); @@ -542,6 +565,7 @@ Deno.test({ permissions: { run: true, read: true } }, function killFailed() { Deno.test( { permissions: { run: true, read: true, env: true } }, async function clearEnv(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), @@ -574,6 +598,7 @@ Deno.test( ignore: Deno.build.os === "windows", }, async function uid(): Promise { + // 
deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ "id", @@ -587,6 +612,7 @@ Deno.test( if (currentUid !== "0") { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [ "echo", @@ -605,6 +631,7 @@ Deno.test( ignore: Deno.build.os === "windows", }, async function gid(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ "id", @@ -618,6 +645,7 @@ Deno.test( if (currentGid !== "0") { assertThrows(() => { + // deno-lint-ignore no-deprecated-deno-api Deno.run({ cmd: [ "echo", @@ -636,6 +664,7 @@ Deno.test( ignore: Deno.build.os === "windows", }, async function non_existent_cwd(): Promise { + // deno-lint-ignore no-deprecated-deno-api const p = Deno.run({ cmd: [ Deno.execPath(), diff --git a/third_party b/third_party index fef5eaa2e3..ee59830ca2 160000 --- a/third_party +++ b/third_party @@ -1 +1 @@ -Subproject commit fef5eaa2e364db431cfbf8089afdd81f71fd46d2 +Subproject commit ee59830ca23fd0aa423a3905005835c586e73e77 From 90a5ef5e343bedc0f6f5326b14b6851b71733bea Mon Sep 17 00:00:00 2001 From: scarf Date: Thu, 27 Apr 2023 12:02:36 +0900 Subject: [PATCH 062/320] feat(cli): flatten deno.json configuaration (#17799) --- cli/args/config_file.rs | 406 +++++++++++++++--- cli/schemas/config-file.v1.json | 90 ++++ cli/tests/integration/fmt_tests.rs | 6 + cli/tests/integration/test_tests.rs | 6 + cli/tests/testdata/bench/collect/deno.jsonc | 8 +- .../bench/collect/deno.malformed.jsonc | 6 +- cli/tests/testdata/bench/collect/deno2.jsonc | 10 +- .../bench/collect_with_malformed_config.out | 2 +- cli/tests/testdata/fmt/deno.malformed.jsonc | 6 +- cli/tests/testdata/fmt/deno.malformed2.jsonc | 8 +- .../fmt/fmt_with_deprecated_config.out | 3 + .../fmt/fmt_with_malformed_config.out | 2 +- .../fmt/fmt_with_malformed_config2.out | 2 +- .../fmt/with_config/deno.deprecated.jsonc | 20 + cli/tests/testdata/fmt/with_config/deno.jsonc | 28 +- .../testdata/lint/Deno.compact.format.jsonc | 6 +- 
cli/tests/testdata/lint/Deno.jsonc | 6 +- cli/tests/testdata/lint/Deno.malformed.jsonc | 6 +- cli/tests/testdata/lint/Deno.malformed2.jsonc | 8 +- cli/tests/testdata/lint/Deno.no_tags.jsonc | 14 +- .../testdata/lint/with_malformed_config.out | 2 +- .../testdata/lint/with_malformed_config2.out | 2 +- .../testdata/lsp/deno.lint.exclude.jsonc | 8 +- .../testdata/test/collect.deprecated.out | 10 + .../test/collect/deno.deprecated.jsonc | 7 + cli/tests/testdata/test/collect/deno.jsonc | 4 +- cli/tests/testdata/test/collect/deno2.jsonc | 6 +- .../test/collect_with_malformed_config.out | 2 +- 28 files changed, 535 insertions(+), 149 deletions(-) create mode 100644 cli/tests/testdata/fmt/fmt_with_deprecated_config.out create mode 100644 cli/tests/testdata/fmt/with_config/deno.deprecated.jsonc create mode 100644 cli/tests/testdata/test/collect.deprecated.out create mode 100644 cli/tests/testdata/test/collect/deno.deprecated.jsonc diff --git a/cli/args/config_file.rs b/cli/args/config_file.rs index 0dda0db7d7..2855199b97 100644 --- a/cli/args/config_file.rs +++ b/cli/args/config_file.rs @@ -279,7 +279,7 @@ impl Serialize for TsConfig { } } -#[derive(Clone, Debug, Default, Deserialize)] +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields)] pub struct LintRulesConfig { pub tags: Option>, @@ -287,7 +287,7 @@ pub struct LintRulesConfig { pub exclude: Option>, } -#[derive(Clone, Debug, Default, Deserialize)] +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields)] struct SerializedFilesConfig { pub include: Vec, @@ -319,6 +319,10 @@ impl SerializedFilesConfig { .collect::, _>>()?, }) } + + pub fn is_empty(&self) -> bool { + self.include.is_empty() && self.exclude.is_empty() + } } #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -346,11 +350,57 @@ impl FilesConfig { } } -#[derive(Clone, Debug, Default, Deserialize)] +/// Choose between flat and nested files configuration. 
+/// +/// `files` has precedence over `deprecated_files`. +/// when `deprecated_files` is present, a warning is logged. +/// +/// caveat: due to default values, it's not possible to distinguish between +/// an empty configuration and a configuration with default values. +/// `{ "files": {} }` is equivalent to `{ "files": { "include": [], "exclude": [] } }` +/// and it wouldn't be able to emit warning for `{ "files": {}, "exclude": [] }`. +/// +/// # Arguments +/// +/// * `files` - Flat configuration. +/// * `deprecated_files` - Nested configuration. ("Files") +fn choose_files( + files: SerializedFilesConfig, + deprecated_files: SerializedFilesConfig, +) -> SerializedFilesConfig { + const DEPRECATED_FILES: &str = + "Warning: \"files\" configuration is deprecated"; + const FLAT_CONFIG: &str = "\"include\" and \"exclude\""; + + let (files_nonempty, deprecated_files_nonempty) = + (!files.is_empty(), !deprecated_files.is_empty()); + + match (files_nonempty, deprecated_files_nonempty) { + (true, true) => { + log::warn!("{DEPRECATED_FILES} and ignored by {FLAT_CONFIG}."); + files + } + (true, false) => files, + (false, true) => { + log::warn!("{DEPRECATED_FILES}. Please use {FLAT_CONFIG} instead."); + deprecated_files + } + (false, false) => SerializedFilesConfig::default(), + } +} + +/// `lint` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. 
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields)] struct SerializedLintConfig { pub rules: LintRulesConfig, - pub files: SerializedFilesConfig, + pub include: Vec, + pub exclude: Vec, + + #[serde(rename = "files")] + pub deprecated_files: SerializedFilesConfig, pub report: Option, } @@ -359,22 +409,26 @@ impl SerializedLintConfig { self, config_file_specifier: &ModuleSpecifier, ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + Ok(LintConfig { rules: self.rules, - files: self.files.into_resolved(config_file_specifier)?, + files: choose_files(files, self.deprecated_files) + .into_resolved(config_file_specifier)?, report: self.report, }) } } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, PartialEq)] pub struct LintConfig { pub rules: LintRulesConfig, pub files: FilesConfig, pub report: Option, } -#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq)] #[serde(deny_unknown_fields, rename_all = "camelCase")] pub enum ProseWrap { Always, @@ -382,7 +436,7 @@ pub enum ProseWrap { Preserve, } -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields, rename_all = "camelCase")] pub struct FmtOptionsConfig { pub use_tabs: Option, @@ -393,11 +447,75 @@ pub struct FmtOptionsConfig { pub semi_colons: Option, } -#[derive(Clone, Debug, Default, Deserialize)] -#[serde(default, deny_unknown_fields)] +impl FmtOptionsConfig { + pub fn is_empty(&self) -> bool { + self.use_tabs.is_none() + && self.line_width.is_none() + && self.indent_width.is_none() + && self.single_quote.is_none() + && self.prose_wrap.is_none() + && self.semi_colons.is_none() + } +} + +/// Choose between flat and nested fmt options. +/// +/// `options` has precedence over `deprecated_options`. 
+/// when `deprecated_options` is present, a warning is logged. +/// +/// caveat: due to default values, it's not possible to distinguish between +/// an empty configuration and a configuration with default values. +/// `{ "fmt": {} } is equivalent to `{ "fmt": { "options": {} } }` +/// and it wouldn't be able to emit warning for `{ "fmt": { "options": {}, "semiColons": "false" } }`. +/// +/// # Arguments +/// +/// * `options` - Flat options. +/// * `deprecated_options` - Nested files configuration ("option"). +fn choose_fmt_options( + options: FmtOptionsConfig, + deprecated_options: FmtOptionsConfig, +) -> FmtOptionsConfig { + const DEPRECATED_OPTIONS: &str = + "Warning: \"options\" configuration is deprecated"; + const FLAT_OPTION: &str = "\"flat\" options"; + + let (options_nonempty, deprecated_options_nonempty) = + (!options.is_empty(), !deprecated_options.is_empty()); + + match (options_nonempty, deprecated_options_nonempty) { + (true, true) => { + log::warn!("{DEPRECATED_OPTIONS} and ignored by {FLAT_OPTION}."); + options + } + (true, false) => options, + (false, true) => { + log::warn!("{DEPRECATED_OPTIONS}. Please use {FLAT_OPTION} instead."); + deprecated_options + } + (false, false) => FmtOptionsConfig::default(), + } +} + +/// `fmt` config representation for serde +/// +/// fields from `use_tabs`..`semi_colons` are expanded from [FmtOptionsConfig]. +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. 
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields, rename_all = "camelCase")] struct SerializedFmtConfig { - pub options: FmtOptionsConfig, - pub files: SerializedFilesConfig, + pub use_tabs: Option, + pub line_width: Option, + pub indent_width: Option, + pub single_quote: Option, + pub prose_wrap: Option, + pub semi_colons: Option, + #[serde(rename = "options")] + pub deprecated_options: FmtOptionsConfig, + pub include: Vec, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: SerializedFilesConfig, } impl SerializedFmtConfig { @@ -405,23 +523,41 @@ impl SerializedFmtConfig { self, config_file_specifier: &ModuleSpecifier, ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + let options = FmtOptionsConfig { + use_tabs: self.use_tabs, + line_width: self.line_width, + indent_width: self.indent_width, + single_quote: self.single_quote, + prose_wrap: self.prose_wrap, + semi_colons: self.semi_colons, + }; + Ok(FmtConfig { - options: self.options, - files: self.files.into_resolved(config_file_specifier)?, + options: choose_fmt_options(options, self.deprecated_options), + files: choose_files(files, self.deprecated_files) + .into_resolved(config_file_specifier)?, }) } } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, PartialEq)] pub struct FmtConfig { pub options: FmtOptionsConfig, pub files: FilesConfig, } -#[derive(Clone, Debug, Default, Deserialize)] +/// `test` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. 
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields)] struct SerializedTestConfig { - pub files: SerializedFilesConfig, + pub include: Vec, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: SerializedFilesConfig, } impl SerializedTestConfig { @@ -429,21 +565,31 @@ impl SerializedTestConfig { self, config_file_specifier: &ModuleSpecifier, ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + Ok(TestConfig { - files: self.files.into_resolved(config_file_specifier)?, + files: choose_files(files, self.deprecated_files) + .into_resolved(config_file_specifier)?, }) } } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, PartialEq)] pub struct TestConfig { pub files: FilesConfig, } -#[derive(Clone, Debug, Default, Deserialize)] +/// `bench` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. 
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)] #[serde(default, deny_unknown_fields)] struct SerializedBenchConfig { - pub files: SerializedFilesConfig, + pub include: Vec, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: SerializedFilesConfig, } impl SerializedBenchConfig { @@ -451,18 +597,22 @@ impl SerializedBenchConfig { self, config_file_specifier: &ModuleSpecifier, ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + Ok(BenchConfig { - files: self.files.into_resolved(config_file_specifier)?, + files: choose_files(files, self.deprecated_files) + .into_resolved(config_file_specifier)?, }) } } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, PartialEq)] pub struct BenchConfig { pub files: FilesConfig, } -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Debug, Deserialize, PartialEq)] #[serde(untagged)] pub enum LockConfig { Bool(bool), @@ -999,6 +1149,12 @@ mod tests { use deno_core::serde_json::json; use pretty_assertions::assert_eq; + fn unpack_object(result: Result, AnyError>, name: &str) -> T { + result + .unwrap_or_else(|err| panic!("error parsing {name} object but got {err}")) + .unwrap_or_else(|| panic!("{name} object should be defined")) + } + #[test] fn read_config_file_absolute() { let path = test_util::testdata_path().join("module_graph/tsconfig.json"); @@ -1043,27 +1199,21 @@ mod tests { "strict": true }, "lint": { - "files": { - "include": ["src/"], - "exclude": ["src/testdata/"] - }, + "include": ["src/"], + "exclude": ["src/testdata/"], "rules": { "tags": ["recommended"], "include": ["ban-untagged-todo"] } }, "fmt": { - "files": { - "include": ["src/"], - "exclude": ["src/testdata/"] - }, - "options": { - "useTabs": true, - "lineWidth": 80, - "indentWidth": 4, - "singleQuote": true, - "proseWrap": "preserve" - } + "include": ["src/"], + "exclude": ["src/testdata/"], + "useTabs": true, + "lineWidth": 80, + 
"indentWidth": 4, + "singleQuote": true, + "proseWrap": "preserve" }, "tasks": { "build": "deno run --allow-read --allow-write build.ts", @@ -1087,38 +1237,38 @@ mod tests { }), ); - let lint_config = config_file - .to_lint_config() - .expect("error parsing lint object") - .expect("lint object should be defined"); - assert_eq!(lint_config.files.include, vec![PathBuf::from("/deno/src/")]); assert_eq!( - lint_config.files.exclude, - vec![PathBuf::from("/deno/src/testdata/")] + unpack_object(config_file.to_lint_config(), "lint"), + LintConfig { + files: FilesConfig { + include: vec![PathBuf::from("/deno/src/")], + exclude: vec![PathBuf::from("/deno/src/testdata/")], + }, + rules: LintRulesConfig { + include: Some(vec!["ban-untagged-todo".to_string()]), + exclude: None, + tags: Some(vec!["recommended".to_string()]), + }, + ..Default::default() + } ); assert_eq!( - lint_config.rules.include, - Some(vec!["ban-untagged-todo".to_string()]) + unpack_object(config_file.to_fmt_config(), "fmt"), + FmtConfig { + files: FilesConfig { + include: vec![PathBuf::from("/deno/src/")], + exclude: vec![PathBuf::from("/deno/src/testdata/")], + }, + options: FmtOptionsConfig { + use_tabs: Some(true), + line_width: Some(80), + indent_width: Some(4), + single_quote: Some(true), + prose_wrap: Some(ProseWrap::Preserve), + ..Default::default() + }, + } ); - assert_eq!( - lint_config.rules.tags, - Some(vec!["recommended".to_string()]) - ); - assert!(lint_config.rules.exclude.is_none()); - - let fmt_config = config_file - .to_fmt_config() - .expect("error parsing fmt object") - .expect("fmt object should be defined"); - assert_eq!(fmt_config.files.include, vec![PathBuf::from("/deno/src/")]); - assert_eq!( - fmt_config.files.exclude, - vec![PathBuf::from("/deno/src/testdata/")], - ); - assert_eq!(fmt_config.options.use_tabs, Some(true)); - assert_eq!(fmt_config.options.line_width, Some(80)); - assert_eq!(fmt_config.options.indent_width, Some(4)); - assert_eq!(fmt_config.options.single_quote, 
Some(true)); let tasks_config = config_file.to_tasks_config().unwrap().unwrap(); assert_eq!( @@ -1131,6 +1281,128 @@ mod tests { ); } + /// if either "include" or "exclude" is specified, "files" is ignored + #[test] + fn test_parse_config_with_deprecated_files_field() { + let config_text = r#"{ + "lint": { + "files": { "include": ["foo/"], "exclude": ["bar/"] }, + "include": ["src/"] + }, + "fmt": { + "files": { "include": ["foo/"], "exclude": ["bar/"] }, + "exclude": ["dist/"] + }, + "bench": { + "files": { "include": ["foo/"] }, + "include": ["src/"] + }, + "test": { + "files": { "include": ["foo/"] }, + "include": ["src/"] + } + }"#; + let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, &config_specifier).unwrap(); + + let lint_files = unpack_object(config_file.to_lint_config(), "lint").files; + assert_eq!( + lint_files, + FilesConfig { + include: vec![PathBuf::from("/deno/src/")], + exclude: vec![], + } + ); + + let fmt_files = unpack_object(config_file.to_fmt_config(), "fmt").files; + assert_eq!( + fmt_files, + FilesConfig { + exclude: vec![PathBuf::from("/deno/dist/")], + include: vec![], + } + ); + + let test_include = unpack_object(config_file.to_test_config(), "test") + .files + .include; + assert_eq!(test_include, vec![PathBuf::from("/deno/src/")]); + + let bench_include = unpack_object(config_file.to_bench_config(), "bench") + .files + .include; + assert_eq!(bench_include, vec![PathBuf::from("/deno/src/")]); + } + + #[test] + fn test_parse_config_with_deprecated_files_field_only() { + let config_text = r#"{ + "lint": { "files": { "include": ["src/"] } }, + "fmt": { "files": { "include": ["src/"] } }, + "test": { "files": { "exclude": ["dist/"] } }, + "bench": { "files": { "exclude": ["dist/"] } } + }"#; + let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap(); + let config_specifier = 
config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, &config_specifier).unwrap(); + + let lint_include = unpack_object(config_file.to_lint_config(), "lint") + .files + .include; + assert_eq!(lint_include, vec![PathBuf::from("/deno/src/")]); + + let fmt_include = unpack_object(config_file.to_fmt_config(), "fmt") + .files + .include; + assert_eq!(fmt_include, vec![PathBuf::from("/deno/src/")]); + + let test_exclude = unpack_object(config_file.to_test_config(), "test") + .files + .exclude; + assert_eq!(test_exclude, vec![PathBuf::from("/deno/dist/")]); + + let bench_exclude = unpack_object(config_file.to_bench_config(), "bench") + .files + .exclude; + assert_eq!(bench_exclude, vec![PathBuf::from("/deno/dist/")]); + } + + #[test] + fn test_parse_config_with_deprecated_fmt_options() { + let config_text_both = r#"{ + "fmt": { + "options": { + "semiColons": true + }, + "semiColons": false + } + }"#; + let config_text_deprecated = r#"{ + "fmt": { + "options": { + "semiColons": true + } + } + }"#; + let config_specifier = + ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap(); + let config_file_both = + ConfigFile::new(config_text_both, &config_specifier).unwrap(); + let config_file_deprecated = + ConfigFile::new(config_text_deprecated, &config_specifier).unwrap(); + + fn unpack_options(config_file: ConfigFile) -> FmtOptionsConfig { + unpack_object(config_file.to_fmt_config(), "fmt").options + } + + let fmt_options_both = unpack_options(config_file_both); + assert_eq!(fmt_options_both.semi_colons, Some(false)); + + let fmt_options_deprecated = unpack_options(config_file_deprecated); + assert_eq!(fmt_options_deprecated.semi_colons, Some(true)); + } + #[test] fn test_parse_config_with_empty_file() { let config_text = ""; diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index f0b4967207..7978a25977 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -227,6 +227,20 @@ 
"description": "Configuration for linter", "type": "object", "properties": { + "include": { + "type": "array", + "description": "List of files or directories that will be linted.", + "items": { + "type": "string" + } + }, + "exclude": { + "type": "array", + "description": "List of files or directories that will not be linted.", + "items": { + "type": "string" + } + }, "files": { "type": "object", "properties": { @@ -293,6 +307,20 @@ "description": "Configuration for formatter", "type": "object", "properties": { + "include": { + "type": "array", + "description": "List of files or directories that will be formatted.", + "items": { + "type": "string" + } + }, + "exclude": { + "type": "array", + "description": "List of files or directories that will not be formatted.", + "items": { + "type": "string" + } + }, "files": { "type": "object", "properties": { @@ -312,6 +340,40 @@ } } }, + "useTabs": { + "description": "Whether to use tabs (true) or spaces (false) for indentation.", + "type": "boolean", + "default": false + }, + "lineWidth": { + "description": "The width of a line the printer will try to stay under. 
Note that the printer may exceed this width in certain cases.", + "type": "number", + "default": 80 + }, + "indentWidth": { + "description": "The number of characters for an indent.", + "type": "number", + "default": 2 + }, + "singleQuote": { + "type": "boolean", + "description": "Whether to use single quote (true) or double quote (false) for quotation.", + "default": false + }, + "proseWrap": { + "description": "Define how prose should be wrapped in Markdown files.", + "default": "always", + "enum": [ + "always", + "never", + "preserve" + ] + }, + "semiColons": { + "description": "Whether to prefer using semicolons.", + "type": "boolean", + "default": true + }, "options": { "type": "object", "properties": { @@ -368,6 +430,20 @@ "description": "Configuration for deno test", "type": "object", "properties": { + "include": { + "type": "array", + "description": "List of files or directories that will be searched for tests.", + "items": { + "type": "string" + } + }, + "exclude": { + "type": "array", + "description": "List of files or directories that will not be searched for tests.", + "items": { + "type": "string" + } + }, "files": { "type": "object", "properties": { @@ -393,6 +469,20 @@ "description": "Configuration for deno bench", "type": "object", "properties": { + "include": { + "type": "array", + "description": "List of files or directories that will be searched for benchmarks.", + "items": { + "type": "string" + } + }, + "exclude": { + "type": "array", + "description": "List of files or directories that will not be searched for benchmarks.", + "items": { + "type": "string" + } + }, "files": { "type": "object", "properties": { diff --git a/cli/tests/integration/fmt_tests.rs b/cli/tests/integration/fmt_tests.rs index 7812175a7b..e47311cf0b 100644 --- a/cli/tests/integration/fmt_tests.rs +++ b/cli/tests/integration/fmt_tests.rs @@ -229,6 +229,12 @@ itest!(fmt_with_config { output: "fmt/fmt_with_config.out", }); +itest!(fmt_with_deprecated_config { + args: + "fmt 
--config fmt/with_config/deno.deprecated.jsonc fmt/with_config/subdir", + output: "fmt/fmt_with_deprecated_config.out", +}); + itest!(fmt_with_config_default { args: "fmt fmt/with_config/subdir", output: "fmt/fmt_with_config.out", diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index cf16652897..0e1a39deb5 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -77,6 +77,12 @@ itest!(test_with_config2 { output: "test/collect2.out", }); +itest!(test_with_deprecated_config { + args: "test --config test/collect/deno.deprecated.jsonc test/collect", + exit_code: 0, + output: "test/collect.deprecated.out", +}); + itest!(test_with_malformed_config { args: "test --config test/collect/deno.malformed.jsonc", exit_code: 1, diff --git a/cli/tests/testdata/bench/collect/deno.jsonc b/cli/tests/testdata/bench/collect/deno.jsonc index f88d137781..7f8f190d3c 100644 --- a/cli/tests/testdata/bench/collect/deno.jsonc +++ b/cli/tests/testdata/bench/collect/deno.jsonc @@ -1,7 +1,5 @@ { - "bench": { - "files": { - "exclude": ["./ignore"] - } - } + "bench": { + "exclude": ["./ignore"] + } } diff --git a/cli/tests/testdata/bench/collect/deno.malformed.jsonc b/cli/tests/testdata/bench/collect/deno.malformed.jsonc index 02744bc111..8e558fbcf2 100644 --- a/cli/tests/testdata/bench/collect/deno.malformed.jsonc +++ b/cli/tests/testdata/bench/collect/deno.malformed.jsonc @@ -1,5 +1,5 @@ { - "bench": { - "dont_know_this_field": {} - } + "bench": { + "dont_know_this_field": {} + } } diff --git a/cli/tests/testdata/bench/collect/deno2.jsonc b/cli/tests/testdata/bench/collect/deno2.jsonc index f24da50491..653ab1e318 100644 --- a/cli/tests/testdata/bench/collect/deno2.jsonc +++ b/cli/tests/testdata/bench/collect/deno2.jsonc @@ -1,8 +1,6 @@ { - "bench": { - "files": { - "include": ["./include/"], - "exclude": ["./ignore", "./include/2_bench.ts"] - } - } + "bench": { + "include": ["./include/"], + "exclude": ["./ignore", 
"./include/2_bench.ts"] + } } diff --git a/cli/tests/testdata/bench/collect_with_malformed_config.out b/cli/tests/testdata/bench/collect_with_malformed_config.out index 10e64707ca..92e5e29d20 100644 --- a/cli/tests/testdata/bench/collect_with_malformed_config.out +++ b/cli/tests/testdata/bench/collect_with_malformed_config.out @@ -1,4 +1,4 @@ error: Failed to parse "bench" configuration Caused by: - unknown field `dont_know_this_field`, expected `files` + unknown field `dont_know_this_field`, expected one of `include`, `exclude`, `files` diff --git a/cli/tests/testdata/fmt/deno.malformed.jsonc b/cli/tests/testdata/fmt/deno.malformed.jsonc index c6200c4ee0..e326edb1f8 100644 --- a/cli/tests/testdata/fmt/deno.malformed.jsonc +++ b/cli/tests/testdata/fmt/deno.malformed.jsonc @@ -1,9 +1,7 @@ { "fmt": { - "files": { - "include": ["fmt_with_config/"], - "exclude": ["fmt_with_config/b.ts"] - }, + "include": ["fmt_with_config/"], + "exclude": ["fmt_with_config/b.ts"], "dont_know_this_field": {}, "options": { "useTabs": true diff --git a/cli/tests/testdata/fmt/deno.malformed2.jsonc b/cli/tests/testdata/fmt/deno.malformed2.jsonc index 4d6e99ae22..e326edb1f8 100644 --- a/cli/tests/testdata/fmt/deno.malformed2.jsonc +++ b/cli/tests/testdata/fmt/deno.malformed2.jsonc @@ -1,10 +1,8 @@ { "fmt": { - "files": { - "include": ["fmt_with_config/"], - "exclude": ["fmt_with_config/b.ts"], - "dont_know_this_field": {} - }, + "include": ["fmt_with_config/"], + "exclude": ["fmt_with_config/b.ts"], + "dont_know_this_field": {}, "options": { "useTabs": true } diff --git a/cli/tests/testdata/fmt/fmt_with_deprecated_config.out b/cli/tests/testdata/fmt/fmt_with_deprecated_config.out new file mode 100644 index 0000000000..793fac1bc4 --- /dev/null +++ b/cli/tests/testdata/fmt/fmt_with_deprecated_config.out @@ -0,0 +1,3 @@ +Warning: "options" configuration is deprecated. Please use "flat" options instead. +Warning: "files" configuration is deprecated. Please use "include" and "exclude" instead. 
+Checked 2 files diff --git a/cli/tests/testdata/fmt/fmt_with_malformed_config.out b/cli/tests/testdata/fmt/fmt_with_malformed_config.out index 1a55613ef2..c269053a66 100644 --- a/cli/tests/testdata/fmt/fmt_with_malformed_config.out +++ b/cli/tests/testdata/fmt/fmt_with_malformed_config.out @@ -1,4 +1,4 @@ error: Failed to parse "fmt" configuration Caused by: - unknown field `dont_know_this_field`, expected `options` or `files` + unknown field `dont_know_this_field`, expected one of `useTabs`, `lineWidth`, `indentWidth`, `singleQuote`, `proseWrap`, `semiColons`, `options`, `include`, `exclude`, `files` diff --git a/cli/tests/testdata/fmt/fmt_with_malformed_config2.out b/cli/tests/testdata/fmt/fmt_with_malformed_config2.out index 948b6b5b85..c269053a66 100644 --- a/cli/tests/testdata/fmt/fmt_with_malformed_config2.out +++ b/cli/tests/testdata/fmt/fmt_with_malformed_config2.out @@ -1,4 +1,4 @@ error: Failed to parse "fmt" configuration Caused by: - unknown field `dont_know_this_field`, expected `include` or `exclude` + unknown field `dont_know_this_field`, expected one of `useTabs`, `lineWidth`, `indentWidth`, `singleQuote`, `proseWrap`, `semiColons`, `options`, `include`, `exclude`, `files` diff --git a/cli/tests/testdata/fmt/with_config/deno.deprecated.jsonc b/cli/tests/testdata/fmt/with_config/deno.deprecated.jsonc new file mode 100644 index 0000000000..e053233fd2 --- /dev/null +++ b/cli/tests/testdata/fmt/with_config/deno.deprecated.jsonc @@ -0,0 +1,20 @@ +{ + "fmt": { + "files": { + "include": [ + "./subdir/" + ], + "exclude": [ + "./subdir/b.ts" + ] + }, + "options": { + "useTabs": true, + "lineWidth": 40, + "indentWidth": 8, + "singleQuote": true, + "proseWrap": "always", + "semiColons": false + } + } +} diff --git a/cli/tests/testdata/fmt/with_config/deno.jsonc b/cli/tests/testdata/fmt/with_config/deno.jsonc index 44e3f9a997..ffd265dcdf 100644 --- a/cli/tests/testdata/fmt/with_config/deno.jsonc +++ b/cli/tests/testdata/fmt/with_config/deno.jsonc @@ -1,20 
+1,16 @@ { "fmt": { - "files": { - "include": [ - "./subdir/" - ], - "exclude": [ - "./subdir/b.ts" - ] - }, - "options": { - "useTabs": true, - "lineWidth": 40, - "indentWidth": 8, - "singleQuote": true, - "proseWrap": "always", - "semiColons": false - } + "include": [ + "./subdir/" + ], + "exclude": [ + "./subdir/b.ts" + ], + "useTabs": true, + "lineWidth": 40, + "indentWidth": 8, + "singleQuote": true, + "proseWrap": "always", + "semiColons": false } } diff --git a/cli/tests/testdata/lint/Deno.compact.format.jsonc b/cli/tests/testdata/lint/Deno.compact.format.jsonc index 24b159ca6b..f3487501a8 100644 --- a/cli/tests/testdata/lint/Deno.compact.format.jsonc +++ b/cli/tests/testdata/lint/Deno.compact.format.jsonc @@ -1,9 +1,7 @@ { "lint": { - "files": { - "include": ["with_config/"], - "exclude": ["with_config/b.ts"] - }, + "include": ["with_config/"], + "exclude": ["with_config/b.ts"], "rules": { "tags": ["recommended"], "include": ["ban-untagged-todo"] diff --git a/cli/tests/testdata/lint/Deno.jsonc b/cli/tests/testdata/lint/Deno.jsonc index 24db221a7e..e9c03cca4e 100644 --- a/cli/tests/testdata/lint/Deno.jsonc +++ b/cli/tests/testdata/lint/Deno.jsonc @@ -1,9 +1,7 @@ { "lint": { - "files": { - "include": ["with_config/"], - "exclude": ["with_config/b.ts"] - }, + "include": ["with_config/"], + "exclude": ["with_config/b.ts"], "rules": { "tags": ["recommended"], "include": ["ban-untagged-todo"] diff --git a/cli/tests/testdata/lint/Deno.malformed.jsonc b/cli/tests/testdata/lint/Deno.malformed.jsonc index 4534a1fe81..fa71cd851c 100644 --- a/cli/tests/testdata/lint/Deno.malformed.jsonc +++ b/cli/tests/testdata/lint/Deno.malformed.jsonc @@ -1,9 +1,7 @@ { "lint": { - "files": { - "include": ["with_config/"], - "exclude": ["with_config/b.ts"] - }, + "include": ["with_config/"], + "exclude": ["with_config/b.ts"], "dont_know_this_field": {}, "rules": { "tags": ["recommended"], diff --git a/cli/tests/testdata/lint/Deno.malformed2.jsonc 
b/cli/tests/testdata/lint/Deno.malformed2.jsonc index 335fcdc235..fa71cd851c 100644 --- a/cli/tests/testdata/lint/Deno.malformed2.jsonc +++ b/cli/tests/testdata/lint/Deno.malformed2.jsonc @@ -1,10 +1,8 @@ { "lint": { - "files": { - "include": ["with_config/"], - "exclude": ["with_config/b.ts"], - "dont_know_this_field": {} - }, + "include": ["with_config/"], + "exclude": ["with_config/b.ts"], + "dont_know_this_field": {}, "rules": { "tags": ["recommended"], "include": ["ban-untagged-todo"] diff --git a/cli/tests/testdata/lint/Deno.no_tags.jsonc b/cli/tests/testdata/lint/Deno.no_tags.jsonc index 4771b0b737..b63600a909 100644 --- a/cli/tests/testdata/lint/Deno.no_tags.jsonc +++ b/cli/tests/testdata/lint/Deno.no_tags.jsonc @@ -1,13 +1,11 @@ { "lint": { - "files": { - "include": [ - "with_config/" - ], - "exclude": [ - "with_config/b.ts" - ] - }, + "include": [ + "with_config/" + ], + "exclude": [ + "with_config/b.ts" + ], "rules": { "include": [ "ban-untagged-todo" diff --git a/cli/tests/testdata/lint/with_malformed_config.out b/cli/tests/testdata/lint/with_malformed_config.out index 3aa4910653..1c0f0fff6e 100644 --- a/cli/tests/testdata/lint/with_malformed_config.out +++ b/cli/tests/testdata/lint/with_malformed_config.out @@ -1,4 +1,4 @@ error: Failed to parse "lint" configuration Caused by: - unknown field `dont_know_this_field`, expected one of `rules`, `files`, `report` + unknown field `dont_know_this_field`, expected one of `rules`, `include`, `exclude`, `files`, `report` diff --git a/cli/tests/testdata/lint/with_malformed_config2.out b/cli/tests/testdata/lint/with_malformed_config2.out index 11e878f005..1c0f0fff6e 100644 --- a/cli/tests/testdata/lint/with_malformed_config2.out +++ b/cli/tests/testdata/lint/with_malformed_config2.out @@ -1,4 +1,4 @@ error: Failed to parse "lint" configuration Caused by: - unknown field `dont_know_this_field`, expected `include` or `exclude` + unknown field `dont_know_this_field`, expected one of `rules`, `include`, `exclude`, 
`files`, `report` diff --git a/cli/tests/testdata/lsp/deno.lint.exclude.jsonc b/cli/tests/testdata/lsp/deno.lint.exclude.jsonc index 89f6108ecf..9d4ba52ada 100644 --- a/cli/tests/testdata/lsp/deno.lint.exclude.jsonc +++ b/cli/tests/testdata/lsp/deno.lint.exclude.jsonc @@ -1,10 +1,8 @@ { "lint": { - "files": { - "exclude": [ - "ignored.ts" - ] - }, + "exclude": [ + "ignored.ts" + ], "rules": { "exclude": [ "camelcase" diff --git a/cli/tests/testdata/test/collect.deprecated.out b/cli/tests/testdata/test/collect.deprecated.out new file mode 100644 index 0000000000..9bf68807cf --- /dev/null +++ b/cli/tests/testdata/test/collect.deprecated.out @@ -0,0 +1,10 @@ +Warning: "files" configuration is deprecated. Please use "include" and "exclude" instead. +Check [WILDCARD]/test/collect/include/2_test.ts +Check [WILDCARD]/test/collect/include/test.ts +Check [WILDCARD]/test/collect/test.ts +running 0 tests from ./test/collect/include/2_test.ts +running 0 tests from ./test/collect/include/test.ts +running 0 tests from ./test/collect/test.ts + +ok | 0 passed | 0 failed ([WILDCARD]) + diff --git a/cli/tests/testdata/test/collect/deno.deprecated.jsonc b/cli/tests/testdata/test/collect/deno.deprecated.jsonc new file mode 100644 index 0000000000..b8acda27d0 --- /dev/null +++ b/cli/tests/testdata/test/collect/deno.deprecated.jsonc @@ -0,0 +1,7 @@ +{ + "test": { + "files": { + "exclude": ["./ignore"] + } + } +} diff --git a/cli/tests/testdata/test/collect/deno.jsonc b/cli/tests/testdata/test/collect/deno.jsonc index b8acda27d0..e14ce86da0 100644 --- a/cli/tests/testdata/test/collect/deno.jsonc +++ b/cli/tests/testdata/test/collect/deno.jsonc @@ -1,7 +1,5 @@ { "test": { - "files": { - "exclude": ["./ignore"] - } + "exclude": ["./ignore"] } } diff --git a/cli/tests/testdata/test/collect/deno2.jsonc b/cli/tests/testdata/test/collect/deno2.jsonc index a4d244e31e..b7af09d1c0 100644 --- a/cli/tests/testdata/test/collect/deno2.jsonc +++ b/cli/tests/testdata/test/collect/deno2.jsonc @@ -1,8 
+1,6 @@ { "test": { - "files": { - "include": ["./include/"], - "exclude": ["./ignore", "./include/2_test.ts"] - } + "include": ["./include/"], + "exclude": ["./ignore", "./include/2_test.ts"] } } diff --git a/cli/tests/testdata/test/collect_with_malformed_config.out b/cli/tests/testdata/test/collect_with_malformed_config.out index 25c34406fd..b31b18e6a2 100644 --- a/cli/tests/testdata/test/collect_with_malformed_config.out +++ b/cli/tests/testdata/test/collect_with_malformed_config.out @@ -1,4 +1,4 @@ error: Failed to parse "test" configuration Caused by: - unknown field `dont_know_this_field`, expected `files` + unknown field `dont_know_this_field`, expected one of `include`, `exclude`, `files` From 1e331a4873cacadc9633e7daeaabdb2749950fee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 12:45:13 +0200 Subject: [PATCH 063/320] refactor(ext/node): migrate back to using "Deno.serve" API for HTTP server (#18865) This commit fixes "node:http" API to properly handle "upgrade" requests and thus marking Vite work again. This is done by migrating back to "Deno.serve()" and internal "upgradeHttpRaw" APIs for "node:http" module polyfill. --- ext/node/polyfills/http.ts | 148 ++++++++++++++++--------------------- 1 file changed, 62 insertions(+), 86 deletions(-) diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 785bbaab3a..1a585f74ce 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; +import { type Deferred, deferred } from "ext:deno_node/_util/async.ts"; import { _normalizeArgs, ListenOptions, Socket } from "ext:deno_node/net.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; import { ERR_SERVER_NOT_RUNNING } from "ext:deno_node/internal/errors.ts"; @@ -16,9 +17,8 @@ import { Agent } from "ext:deno_node/_http_agent.mjs"; import { chunkExpression as RE_TE_CHUNKED } from "ext:deno_node/_http_common.ts"; import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts"; -import { upgradeHttpRaw } from "ext:deno_http/00_serve.js"; -import * as httpRuntime from "ext:runtime/40_http.js"; import { connResetException } from "ext:deno_node/internal/errors.ts"; +import { serve, upgradeHttpRaw } from "ext:deno_http/00_serve.js"; enum STATUS_CODES { /** RFC 7231, 6.2.1 */ @@ -427,7 +427,7 @@ export class ServerResponse extends NodeWritable { finished = false; headersSent = false; #firstChunk: Chunk | null = null; - #reqEvent?: Deno.RequestEvent; + #resolve: (value: Response | PromiseLike) => void; static #enqueue(controller: ReadableStreamDefaultController, chunk: Chunk) { if (typeof chunk === "string") { @@ -443,7 +443,7 @@ export class ServerResponse extends NodeWritable { return status === 101 || status === 204 || status === 205 || status === 304; } - constructor(reqEvent: undefined | Deno.RequestEvent) { + constructor(resolve: (value: Response | PromiseLike) => void) { let controller: ReadableByteStreamController; const readable = new ReadableStream({ start(c) { @@ -485,7 +485,7 @@ export class ServerResponse extends NodeWritable { }, }); this.#readable = readable; - this.#reqEvent = reqEvent; + this.#resolve = resolve; } setHeader(name: string, value: string) { @@ -536,16 +536,13 @@ export class ServerResponse extends NodeWritable { if (ServerResponse.#bodyShouldBeNull(this.statusCode!)) { body = null; } - 
this.#reqEvent!.respondWith( + this.#resolve( new Response(body, { headers: this.#headers, status: this.statusCode, statusText: this.statusMessage, }), - ).catch(() => { - // TODO(bartlomieju): this error should be handled somehow - // ignore this error - }); + ); } // deno-lint-ignore no-explicit-any @@ -577,7 +574,7 @@ export class IncomingMessageForServer extends NodeReadable { // These properties are used by `npm:forwarded` for example. socket: { remoteAddress: string; remotePort: number }; - constructor(req: Request, conn: Deno.Conn) { + constructor(req: Request, remoteAddr: { hostname: string; port: number }) { // Check if no body (GET/HEAD/OPTIONS/...) const reader = req.body?.getReader(); super({ @@ -605,8 +602,8 @@ export class IncomingMessageForServer extends NodeReadable { this.url = req.url?.slice(req.url.indexOf("/", 8)); this.method = req.method; this.socket = { - remoteAddress: conn.remoteAddr.hostname, - remotePort: conn.remoteAddr.port, + remoteAddress: remoteAddr.hostname, + remotePort: remoteAddr.port, }; this.#req = req; } @@ -648,10 +645,17 @@ export function Server(handler?: ServerHandler): ServerImpl { class ServerImpl extends EventEmitter { #httpConnections: Set = new Set(); #listener?: Deno.Listener; + + #addr: Deno.NetAddr; + #hasClosed = false; + #ac?: AbortController; + #servePromise: Deferred; listening = false; constructor(handler?: ServerHandler) { super(); + this.#servePromise = deferred(); + this.#servePromise.then(() => this.emit("close")); if (handler !== undefined) { this.on("request", handler); } @@ -676,70 +680,52 @@ class ServerImpl extends EventEmitter { // TODO(bnoordhuis) Node prefers [::] when host is omitted, // we on the other hand default to 0.0.0.0. + const hostname = options.host ?? "0.0.0.0"; + this.#addr = { + hostname, + port, + } as Deno.NetAddr; this.listening = true; - const hostname = options.host ?? 
""; - this.#listener = Deno.listen({ port, hostname }); - nextTick(() => this.#listenLoop()); + nextTick(() => this.#serve()); return this; } - async #listenLoop() { - const go = async (tcpConn: Deno.Conn, httpConn: Deno.HttpConn) => { - try { - for (;;) { - let reqEvent = null; - try { - // Note: httpConn.nextRequest() calls httpConn.close() on error. - reqEvent = await httpConn.nextRequest(); - } catch { - // Connection closed. - // TODO(bnoordhuis) Emit "clientError" event on the http.Server - // instance? Node emits it when request parsing fails and expects - // the listener to send a raw 4xx HTTP response on the underlying - // net.Socket but we don't have one to pass to the listener. - } - if (reqEvent === null) { - break; - } - const req = new IncomingMessageForServer(reqEvent.request, tcpConn); - if (req.upgrade && this.listenerCount("upgrade") > 0) { - const conn = await upgradeHttpRaw( - reqEvent.request, - tcpConn, - ) as Deno.Conn; - const socket = new Socket({ - handle: new TCP(constants.SERVER, conn), - }); - this.emit("upgrade", req, socket, Buffer.from([])); - return; - } else { - const res = new ServerResponse(reqEvent); - this.emit("request", req, res); - } - } - } finally { - this.#httpConnections.delete(httpConn); + #serve() { + const ac = new AbortController(); + const handler = (request: Request, info: Deno.ServeHandlerInfo) => { + const req = new IncomingMessageForServer(request, info.remoteAddr); + if (req.upgrade && this.listenerCount("upgrade") > 0) { + const { conn, response } = upgradeHttpRaw(request); + const socket = new Socket({ + handle: new TCP(constants.SERVER, conn), + }); + this.emit("upgrade", req, socket, Buffer.from([])); + return response; + } else { + return new Promise((resolve): void => { + const res = new ServerResponse(resolve); + this.emit("request", req, res); + }); } }; - const listener = this.#listener; - - if (listener !== undefined) { - this.emit("listening"); - - for await (const conn of listener) { - let 
httpConn: Deno.HttpConn; - try { - httpConn = httpRuntime.serveHttp(conn); - } catch { - continue; /// Connection closed. - } - - this.#httpConnections.add(httpConn); - go(conn, httpConn); - } + if (this.#hasClosed) { + return; } + this.#ac = ac; + serve( + { + handler: handler as Deno.ServeHandler, + ...this.#addr, + signal: ac.signal, + // @ts-ignore Might be any without `--unstable` flag + onListen: ({ port }) => { + this.#addr!.port = port; + this.emit("listening"); + }, + }, + ).then(() => this.#servePromise!.resolve()); } setTimeout() { @@ -750,6 +736,7 @@ class ServerImpl extends EventEmitter { const listening = this.listening; this.listening = false; + this.#hasClosed = true; if (typeof cb === "function") { if (listening) { this.once("close", cb); @@ -760,31 +747,20 @@ class ServerImpl extends EventEmitter { } } - nextTick(() => this.emit("close")); - - if (listening) { - this.#listener!.close(); - this.#listener = undefined; - - for (const httpConn of this.#httpConnections) { - try { - httpConn.close(); - } catch { - // Already closed. - } - } - - this.#httpConnections.clear(); + if (listening && this.#ac) { + this.#ac.abort(); + this.#ac = undefined; + } else { + this.#servePromise!.resolve(); } return this; } address() { - const addr = this.#listener!.addr as Deno.NetAddr; return { - port: addr.port, - address: addr.hostname, + port: this.#addr.port, + address: this.#addr.hostname, }; } } From d043a6d72cbf683c70f7eb4b9b3c09003afd2683 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 12:47:52 +0200 Subject: [PATCH 064/320] perf(ext/websocket): various performance improvements (#18862) - No need to wrap buffer in a `new DataView()` - Deferred ops are still eagerly polled, but resolved on the next tick of the event loop, we don't want them to be eagerly polled - Using "core.opAsync"/"core.opAsync2" incurs additional cost of looking up these functions on each call. 
Similarly with "ops.*" --------- Co-authored-by: Divy Srivastava --- ext/websocket/01_websocket.js | 30 +++++++++++++++--------------- ext/websocket/lib.rs | 2 +- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index cb9f756d21..7b23df80f0 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -3,7 +3,10 @@ /// const core = globalThis.Deno.core; -const ops = core.ops; +const { opAsync, opAsync2 } = core; +// deno-lint-ignore camelcase +const op_ws_check_permission_and_cancel_handle = + core.ops.op_ws_check_permission_and_cancel_handle; import { URL } from "ext:deno_url/00_url.js"; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { HTTP_TOKEN_CODE_POINT_RE } from "ext:deno_web/00_infra.js"; @@ -210,7 +213,7 @@ class WebSocket extends EventTarget { this[_url] = wsURL.href; this[_role] = CLIENT; - ops.op_ws_check_permission_and_cancel_handle( + op_ws_check_permission_and_cancel_handle( "WebSocket.abort()", this[_url], false, @@ -247,7 +250,7 @@ class WebSocket extends EventTarget { } PromisePrototypeThen( - core.opAsync( + opAsync( "op_ws_create", "new WebSocket()", wsURL.href, @@ -260,7 +263,7 @@ class WebSocket extends EventTarget { if (this[_readyState] === CLOSING) { PromisePrototypeThen( - core.opAsync("op_ws_close", this[_rid]), + opAsync("op_ws_close", this[_rid]), () => { this[_readyState] = CLOSED; @@ -316,7 +319,7 @@ class WebSocket extends EventTarget { const sendTypedArray = (view, byteLength) => { this[_bufferedAmount] += byteLength; PromisePrototypeThen( - core.opAsync2( + opAsync2( "op_ws_send_binary", this[_rid], view, @@ -345,16 +348,13 @@ class WebSocket extends EventTarget { sendTypedArray(data, TypedArrayPrototypeGetByteLength(data)); } } else if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, data)) { - sendTypedArray( - new DataView(data), - ArrayBufferPrototypeGetByteLength(data), - ); + sendTypedArray(data, 
ArrayBufferPrototypeGetByteLength(data)); } else { const string = String(data); const d = core.encode(string); this[_bufferedAmount] += TypedArrayPrototypeGetByteLength(d); PromisePrototypeThen( - core.opAsync2( + opAsync2( "op_ws_send_text", this[_rid], string, @@ -413,7 +413,7 @@ class WebSocket extends EventTarget { this[_readyState] = CLOSING; PromisePrototypeCatch( - core.opAsync( + opAsync( "op_ws_close", this[_rid], code, @@ -438,7 +438,7 @@ class WebSocket extends EventTarget { async [_eventLoop]() { while (this[_readyState] !== CLOSED) { - const { 0: kind, 1: value } = await core.opAsync2( + const { 0: kind, 1: value } = await opAsync2( "op_ws_next_event", this[_rid], ); @@ -501,7 +501,7 @@ class WebSocket extends EventTarget { if (prevState === OPEN) { try { - await core.opAsync( + await opAsync( "op_ws_close", this[_rid], code, @@ -530,12 +530,12 @@ class WebSocket extends EventTarget { clearTimeout(this[_idleTimeoutTimeout]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { - await core.opAsync("op_ws_send_ping", this[_rid]); + await opAsync("op_ws_send_ping", this[_rid]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { this[_readyState] = CLOSING; const reason = "No response from ping frame."; - await core.opAsync( + await opAsync( "op_ws_close", this[_rid], 1001, diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index df4127d273..9ea341fbb6 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -427,7 +427,7 @@ pub async fn op_ws_close( Ok(()) } -#[op(deferred)] +#[op(fast)] pub async fn op_ws_next_event( state: Rc>, rid: ResourceId, From 03132e19da6c8e34e8100c6a57cd911b43900950 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 27 Apr 2023 13:40:03 +0100 Subject: [PATCH 065/320] fix(test): handle dispatched exceptions from test functions (#18853) Fixes #18852. 
--- cli/tests/integration/test_tests.rs | 6 ++ cli/tests/testdata/test/report_error.out | 23 ++++++++ cli/tests/testdata/test/report_error.ts | 6 ++ cli/tools/bench.rs | 9 +-- cli/tools/test.rs | 9 +-- core/inspector.rs | 23 +++----- core/ops_builtin_v8.rs | 23 +++----- core/runtime.rs | 71 ++++++++++++++---------- 8 files changed, 98 insertions(+), 72 deletions(-) create mode 100644 cli/tests/testdata/test/report_error.out create mode 100644 cli/tests/testdata/test/report_error.ts diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index 0e1a39deb5..223c02e244 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -425,6 +425,12 @@ itest!(uncaught_errors { exit_code: 1, }); +itest!(report_error { + args: "test --quiet test/report_error.ts", + output: "test/report_error.out", + exit_code: 1, +}); + itest!(check_local_by_default { args: "test --quiet test/check_local_by_default.ts", output: "test/check_local_by_default.out", diff --git a/cli/tests/testdata/test/report_error.out b/cli/tests/testdata/test/report_error.out new file mode 100644 index 0000000000..698550f97d --- /dev/null +++ b/cli/tests/testdata/test/report_error.out @@ -0,0 +1,23 @@ +running 2 tests from [WILDCARD]/report_error.ts +foo ... +Uncaught error from [WILDCARD]/report_error.ts FAILED +foo ... cancelled (0ms) +bar ... cancelled (0ms) + + ERRORS + +[WILDCARD]/report_error.ts (uncaught error) +error: Error: foo + reportError(new Error("foo")); + ^ + at [WILDCARD]/report_error.ts:2:15 +This error was not caught from a test and caused the test runner to fail on the referenced module. +It most likely originated from a dangling promise, event/timeout handler or top-level code. 
+ + FAILURES + +[WILDCARD]/report_error.ts (uncaught error) + +FAILED | 0 passed | 3 failed ([WILDCARD]) + +error: Test failed diff --git a/cli/tests/testdata/test/report_error.ts b/cli/tests/testdata/test/report_error.ts new file mode 100644 index 0000000000..56b6db26c8 --- /dev/null +++ b/cli/tests/testdata/test/report_error.ts @@ -0,0 +1,6 @@ +Deno.test("foo", () => { + reportError(new Error("foo")); + console.log(1); +}); + +Deno.test("bar", () => {}); diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 962b1ac174..5f467bc6e2 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -489,14 +489,7 @@ async fn bench_specifier( }))?; for (desc, function) in benchmarks { sender.send(BenchEvent::Wait(desc.id))?; - let promise = { - let scope = &mut worker.js_runtime.handle_scope(); - let cb = function.open(scope); - let this = v8::undefined(scope).into(); - let promise = cb.call(scope, this, &[]).unwrap(); - v8::Global::new(scope, promise) - }; - let result = worker.js_runtime.resolve_value(promise).await?; + let result = worker.js_runtime.call_and_await(&function).await?; let scope = &mut worker.js_runtime.handle_scope(); let result = v8::Local::new(scope, result); let result = serde_v8::from_v8::(scope, result)?; diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 268f3b4b9e..62a104733d 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -997,14 +997,7 @@ pub async fn test_specifier( } sender.send(TestEvent::Wait(desc.id))?; let earlier = SystemTime::now(); - let promise = { - let scope = &mut worker.js_runtime.handle_scope(); - let cb = function.open(scope); - let this = v8::undefined(scope).into(); - let promise = cb.call(scope, this, &[]).unwrap(); - v8::Global::new(scope, promise) - }; - let result = match worker.js_runtime.resolve_value(promise).await { + let result = match worker.js_runtime.call_and_await(&function).await { Ok(r) => r, Err(error) => { if error.is::() { diff --git a/core/inspector.rs b/core/inspector.rs index 
c83784fe38..b0a55cf12b 100644 --- a/core/inspector.rs +++ b/core/inspector.rs @@ -11,7 +11,6 @@ use crate::futures::channel::mpsc::UnboundedSender; use crate::futures::channel::oneshot; use crate::futures::future::select; use crate::futures::future::Either; -use crate::futures::future::Future; use crate::futures::prelude::*; use crate::futures::stream::SelectAll; use crate::futures::stream::StreamExt; @@ -82,6 +81,7 @@ pub struct JsRuntimeInspector { flags: RefCell, waker: Arc, deregister_tx: Option>, + is_dispatching_message: RefCell, } impl Drop for JsRuntimeInspector { @@ -141,18 +141,6 @@ impl v8::inspector::V8InspectorClientImpl for JsRuntimeInspector { } } -/// Polling `JsRuntimeInspector` allows inspector to accept new incoming -/// connections and "pump" messages in different sessions. -/// -/// It should be polled on tick of event loop, ie. in `JsRuntime::poll_event_loop` -/// function. -impl Future for JsRuntimeInspector { - type Output = (); - fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<()> { - self.poll_sessions(Some(cx)).unwrap() - } -} - impl JsRuntimeInspector { /// Currently Deno supports only a single context in `JsRuntime` /// and thus it's id is provided as an associated contant. 
@@ -182,6 +170,7 @@ impl JsRuntimeInspector { flags: Default::default(), waker, deregister_tx: None, + is_dispatching_message: Default::default(), })); let mut self_ = self__.borrow_mut(); self_.v8_inspector = Rc::new(RefCell::new( @@ -224,6 +213,10 @@ impl JsRuntimeInspector { self__ } + pub fn is_dispatching_message(&self) -> bool { + *self.is_dispatching_message.borrow() + } + pub fn context_destroyed( &mut self, scope: &mut HandleScope, @@ -246,7 +239,7 @@ impl JsRuntimeInspector { self.sessions.borrow().has_blocking_sessions() } - fn poll_sessions( + pub fn poll_sessions( &self, mut invoker_cx: Option<&mut Context>, ) -> Result, BorrowMutError> { @@ -304,7 +297,9 @@ impl JsRuntimeInspector { match sessions.established.poll_next_unpin(cx) { Poll::Ready(Some(session_stream_item)) => { let (v8_session_ptr, msg) = session_stream_item; + *self.is_dispatching_message.borrow_mut() = true; InspectorSession::dispatch_message(v8_session_ptr, msg); + *self.is_dispatching_message.borrow_mut() = false; continue; } Poll::Ready(None) => break, diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index 6e8b2efda0..f4133f3b8e 100644 --- a/core/ops_builtin_v8.rs +++ b/core/ops_builtin_v8.rs @@ -713,22 +713,17 @@ fn op_dispatch_exception( ) { let state_rc = JsRuntime::state(scope); let mut state = state_rc.borrow_mut(); - state - .dispatched_exceptions - .push_front(v8::Global::new(scope, exception.v8_value)); - // Only terminate execution if there are no inspector sessions. - if state.inspector.is_none() { - scope.terminate_execution(); - return; - } + if let Some(inspector) = &state.inspector { + let inspector = inspector.borrow(); + // TODO(nayeemrmn): Send exception message to inspector sessions here. - // FIXME(bartlomieju): I'm not sure if this assumption is valid... Maybe when - // inspector is polling on pause? 
- if state.inspector().try_borrow().is_ok() { - scope.terminate_execution(); - } else { - // If the inspector is borrowed at this time, assume an inspector is active. + // This indicates that the op is being called from a REPL. Skip termination. + if inspector.is_dispatching_message() { + return; + } } + state.dispatched_exception = Some(v8::Global::new(scope, exception.v8_value)); + scope.terminate_execution(); } #[op(v8)] diff --git a/core/runtime.rs b/core/runtime.rs index 9ead489af7..d8355ae6d6 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -44,7 +44,6 @@ use smallvec::SmallVec; use std::any::Any; use std::cell::RefCell; use std::collections::HashMap; -use std::collections::VecDeque; use std::ffi::c_void; use std::option::Option; use std::pin::Pin; @@ -176,7 +175,7 @@ pub struct JsRuntimeState { /// instead of any other exceptions. // TODO(nayeemrmn): This is polled in `exception_to_err_result()` which is // flimsy. Try to poll it similarly to `pending_promise_rejections`. - pub(crate) dispatched_exceptions: VecDeque>, + pub(crate) dispatched_exception: Option>, pub(crate) inspector: Option>>, waker: AtomicWaker, } @@ -349,7 +348,7 @@ impl JsRuntime { op_state: op_state.clone(), waker: AtomicWaker::new(), have_unpolled_ops: false, - dispatched_exceptions: Default::default(), + dispatched_exception: None, // Some fields are initialized later after isolate is created inspector: None, global_realm: None, @@ -946,6 +945,27 @@ impl JsRuntime { ) } + /// Call a function. If it returns a promise, run the event loop until that + /// promise is settled. If the promise rejects or there is an uncaught error + /// in the event loop, return `Err(error)`. Or return `Ok()`. 
+ pub async fn call_and_await( + &mut self, + function: &v8::Global, + ) -> Result, Error> { + let promise = { + let scope = &mut self.handle_scope(); + let cb = function.open(scope); + let this = v8::undefined(scope).into(); + let promise = cb.call(scope, this, &[]); + if promise.is_none() || scope.is_execution_terminating() { + let undefined = v8::undefined(scope).into(); + return exception_to_err_result(scope, undefined, false); + } + v8::Global::new(scope, promise.unwrap()) + }; + self.resolve_value(promise).await + } + /// Takes a snapshot. The isolate should have been created with will_snapshot /// set to true. /// @@ -1195,7 +1215,7 @@ impl JsRuntime { if has_inspector { // We poll the inspector first. - let _ = self.inspector().borrow_mut().poll_unpin(cx); + let _ = self.inspector().borrow().poll_sessions(Some(cx)).unwrap(); } self.pump_v8_message_loop()?; @@ -1518,19 +1538,14 @@ pub(crate) fn exception_to_err_result( // to use the exception that was passed to it rather than the exception that // was passed to this function. let state = state_rc.borrow(); - exception = state - .dispatched_exceptions - .back() - .map(|exception| v8::Local::new(scope, exception.clone())) - .unwrap_or_else(|| { - // Maybe make a new exception object. 
- if was_terminating_execution && exception.is_null_or_undefined() { - let message = v8::String::new(scope, "execution terminated").unwrap(); - v8::Exception::error(scope, message) - } else { - exception - } - }); + exception = if let Some(exception) = &state.dispatched_exception { + v8::Local::new(scope, exception.clone()) + } else if was_terminating_execution && exception.is_null_or_undefined() { + let message = v8::String::new(scope, "execution terminated").unwrap(); + v8::Exception::error(scope, message) + } else { + exception + }; } let mut js_error = JsError::from_v8_exception(scope, exception); @@ -1738,7 +1753,7 @@ impl JsRuntime { status = module.get_status(); let has_dispatched_exception = - !state_rc.borrow_mut().dispatched_exceptions.is_empty(); + state_rc.borrow_mut().dispatched_exception.is_some(); if has_dispatched_exception { // This will be overrided in `exception_to_err_result()`. let exception = v8::undefined(tc_scope).into(); @@ -2659,7 +2674,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + var promiseIdSymbol = Symbol.for("Deno.core.internalPromiseId"); var p1 = Deno.core.opAsync("op_test", 42); var p2 = Deno.core.opAsync("op_test", 42); @@ -2715,7 +2730,7 @@ pub mod tests { "filename.js", r#" let control = 42; - + Deno.core.opAsync("op_test", control); async function main() { Deno.core.opAsync("op_test", control); @@ -2734,7 +2749,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + const p = Deno.core.opAsync("op_test", 42); if (p[Symbol.for("Deno.core.internalPromiseId")] == undefined) { throw new Error("missing id on returned promise"); @@ -2751,7 +2766,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + Deno.core.opAsync("op_test"); "#, ) @@ -2766,7 +2781,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + let zero_copy_a = new Uint8Array([0]); Deno.core.opAsync2("op_test", null, zero_copy_a); "#, @@ -3928,7 +3943,7 @@ 
Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .execute_script_static( "macrotasks_and_nextticks.js", r#" - + (async function () { const results = []; Deno.core.setMacrotaskCallback(() => { @@ -4166,7 +4181,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { "", format!( r#" - + globalThis.rejectValue = undefined; Deno.core.setPromiseRejectCallback((_type, _promise, reason) => {{ globalThis.rejectValue = `{realm_name}/${{reason}}`; @@ -4604,7 +4619,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + (async function () { const buf = await Deno.core.opAsync("op_test", false); let err; @@ -4657,7 +4672,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + var promise = Deno.core.opAsync("op_pending"); "#, ) @@ -4667,7 +4682,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.v8_isolate(), "", r#" - + var promise = Deno.core.opAsync("op_pending"); "#, ) From 742cc3111ccb7c3c12c1b05904be052094657481 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 27 Apr 2023 10:05:20 -0400 Subject: [PATCH 066/320] refactor(cli): extract out ProcState from CliMainWorker (#18867) --- cli/args/mod.rs | 93 +++++--- cli/lsp/testing/execution.rs | 11 +- cli/module_loader.rs | 191 ++++++++------- cli/proc_state.rs | 88 +++++-- cli/tools/bench.rs | 48 ++-- cli/tools/repl/mod.rs | 42 ++-- cli/tools/repl/session.rs | 36 +-- cli/tools/run.rs | 38 +-- cli/tools/test.rs | 93 +++++--- cli/worker.rs | 437 ++++++++++++++++++++--------------- ext/node/lib.rs | 4 +- 11 files changed, 660 insertions(+), 421 deletions(-) diff --git a/cli/args/mod.rs b/cli/args/mod.rs index f83b339368..440403f62c 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -817,30 +817,6 @@ impl CliOptions { ) } - /// Resolves the storage key to use based on the current flags, config, or main module. 
- pub fn resolve_storage_key( - &self, - main_module: &ModuleSpecifier, - ) -> Option { - if let Some(location) = &self.flags.location { - // if a location is set, then the ascii serialization of the location is - // used, unless the origin is opaque, and then no storage origin is set, as - // we can't expect the origin to be reproducible - let storage_origin = location.origin(); - if storage_origin.is_tuple() { - Some(storage_origin.ascii_serialization()) - } else { - None - } - } else if let Some(config_file) = &self.maybe_config_file { - // otherwise we will use the path to the config file - Some(config_file.specifier.to_string()) - } else { - // otherwise we will use the path to the main module - Some(main_module.to_string()) - } - } - pub fn resolve_inspector_server(&self) -> Option { let maybe_inspect_host = self .flags @@ -1089,20 +1065,6 @@ impl CliOptions { &self.flags.subcommand } - pub fn trace_ops(&self) -> bool { - match self.sub_command() { - DenoSubcommand::Test(flags) => flags.trace_ops, - _ => false, - } - } - - pub fn shuffle_tests(&self) -> Option { - match self.sub_command() { - DenoSubcommand::Test(flags) => flags.shuffle, - _ => None, - } - } - pub fn type_check_mode(&self) -> TypeCheckMode { self.flags.type_check_mode } @@ -1216,6 +1178,44 @@ fn resolve_import_map_specifier( Ok(None) } +pub struct StorageKeyResolver(Option>); + +impl StorageKeyResolver { + pub fn from_options(options: &CliOptions) -> Self { + Self(if let Some(location) = &options.flags.location { + // if a location is set, then the ascii serialization of the location is + // used, unless the origin is opaque, and then no storage origin is set, as + // we can't expect the origin to be reproducible + let storage_origin = location.origin(); + if storage_origin.is_tuple() { + Some(Some(storage_origin.ascii_serialization())) + } else { + Some(None) + } + } else { + // otherwise we will use the path to the config file or None to + // fall back to using the main module's path + 
options + .maybe_config_file + .as_ref() + .map(|config_file| Some(config_file.specifier.to_string())) + }) + } + + /// Resolves the storage key to use based on the current flags, config, or main module. + pub fn resolve_storage_key( + &self, + main_module: &ModuleSpecifier, + ) -> Option { + // use the stored value or fall back to using the path of the main module. + if let Some(maybe_value) = &self.0 { + maybe_value.clone() + } else { + Some(main_module.to_string()) + } + } +} + /// Collect included and ignored files. CLI flags take precedence /// over config file, i.e. if there's `files.ignore` in config file /// and `--ignore` CLI flag, only the flag value is taken into account. @@ -1381,4 +1381,21 @@ mod test { let actual = actual.unwrap(); assert_eq!(actual, None); } + + #[test] + fn storage_key_resolver_test() { + let resolver = StorageKeyResolver(None); + let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap(); + assert_eq!( + resolver.resolve_storage_key(&specifier), + Some(specifier.to_string()) + ); + let resolver = StorageKeyResolver(Some(None)); + assert_eq!(resolver.resolve_storage_key(&specifier), None); + let resolver = StorageKeyResolver(Some(Some("value".to_string()))); + assert_eq!( + resolver.resolve_storage_key(&specifier), + Some("value".to_string()) + ); + } } diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index 5e5a3788af..b7859ebda4 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -257,10 +257,11 @@ impl TestRun { let tests: Arc>> = Arc::new(RwLock::new(IndexMap::new())); let mut test_steps = IndexMap::new(); + let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); let join_handles = queue.into_iter().map(move |specifier| { let specifier = specifier.clone(); - let ps = ps.clone(); + let worker_factory = worker_factory.clone(); let permissions = permissions.clone(); let mut sender = sender.clone(); let fail_fast_tracker = fail_fast_tracker.clone(); @@ -288,12 +289,16 
@@ impl TestRun { Ok(()) } else { run_local(test::test_specifier( - &ps, + &worker_factory, permissions, specifier, sender.clone(), fail_fast_tracker, - filter, + &test::TestSpecifierOptions { + filter, + shuffle: None, + trace_ops: false, + }, )) }; if let Err(error) = file_result { diff --git a/cli/module_loader.rs b/cli/module_loader.rs index e4b8b616d7..7de45af28a 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -14,7 +14,6 @@ use crate::node; use crate::node::CliNodeCodeTranslator; use crate::proc_state::CjsResolutionStore; use crate::proc_state::FileWatcherReporter; -use crate::proc_state::ProcState; use crate::resolver::CliGraphResolver; use crate::tools::check; use crate::tools::check::TypeChecker; @@ -224,6 +223,88 @@ pub struct ModuleCodeSource { pub media_type: MediaType, } +struct SharedCliModuleLoaderState { + lib_window: TsTypeLib, + lib_worker: TsTypeLib, + is_inspecting: bool, + is_repl: bool, + emitter: Arc, + graph_container: Arc, + module_load_preparer: Arc, + parsed_source_cache: Arc, + resolver: Arc, + npm_module_loader: NpmModuleLoader, +} + +pub struct CliModuleLoaderFactory { + state: Arc, +} + +impl CliModuleLoaderFactory { + pub fn new( + options: &CliOptions, + emitter: Arc, + graph_container: Arc, + module_load_preparer: Arc, + parsed_source_cache: Arc, + resolver: Arc, + npm_module_loader: NpmModuleLoader, + ) -> Self { + Self { + state: Arc::new(SharedCliModuleLoaderState { + lib_window: options.ts_type_lib_window(), + lib_worker: options.ts_type_lib_worker(), + is_inspecting: options.is_inspecting(), + is_repl: matches!(options.sub_command(), DenoSubcommand::Repl(_)), + emitter, + graph_container, + module_load_preparer, + parsed_source_cache, + resolver, + npm_module_loader, + }), + } + } + + pub fn create_for_main( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> CliModuleLoader { + self.create_with_lib( + self.state.lib_window, + root_permissions, + 
dynamic_permissions, + ) + } + + pub fn create_for_worker( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> CliModuleLoader { + self.create_with_lib( + self.state.lib_worker, + root_permissions, + dynamic_permissions, + ) + } + + fn create_with_lib( + &self, + lib: TsTypeLib, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> CliModuleLoader { + CliModuleLoader { + lib, + root_permissions, + dynamic_permissions, + shared: self.state.clone(), + } + } +} + pub struct CliModuleLoader { lib: TsTypeLib, /// The initial set of permissions used to resolve the static imports in the @@ -233,62 +314,10 @@ pub struct CliModuleLoader { /// Permissions used to resolve dynamic imports, these get passed as /// "root permissions" for Web Worker. dynamic_permissions: PermissionsContainer, - cli_options: Arc, - emitter: Arc, - graph_container: Arc, - module_load_preparer: Arc, - parsed_source_cache: Arc, - resolver: Arc, - npm_module_loader: NpmModuleLoader, + shared: Arc, } impl CliModuleLoader { - pub fn new( - ps: ProcState, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, - ) -> Rc { - Rc::new(CliModuleLoader { - lib: ps.options.ts_type_lib_window(), - root_permissions, - dynamic_permissions, - cli_options: ps.options.clone(), - emitter: ps.emitter.clone(), - graph_container: ps.graph_container.clone(), - module_load_preparer: ps.module_load_preparer.clone(), - parsed_source_cache: ps.parsed_source_cache.clone(), - resolver: ps.resolver.clone(), - npm_module_loader: NpmModuleLoader::new( - ps.cjs_resolutions.clone(), - ps.node_code_translator.clone(), - ps.node_resolver.clone(), - ), - }) - } - - pub fn new_for_worker( - ps: ProcState, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, - ) -> Rc { - Rc::new(CliModuleLoader { - lib: ps.options.ts_type_lib_worker(), - root_permissions, - dynamic_permissions, - 
cli_options: ps.options.clone(), - emitter: ps.emitter.clone(), - graph_container: ps.graph_container.clone(), - module_load_preparer: ps.module_load_preparer.clone(), - parsed_source_cache: ps.parsed_source_cache.clone(), - resolver: ps.resolver.clone(), - npm_module_loader: NpmModuleLoader::new( - ps.cjs_resolutions.clone(), - ps.node_code_translator.clone(), - ps.node_resolver.clone(), - ), - }) - } - fn load_prepared_module( &self, specifier: &ModuleSpecifier, @@ -298,7 +327,7 @@ impl CliModuleLoader { unreachable!(); // Node built-in modules should be handled internally. } - let graph = self.graph_container.graph(); + let graph = self.shared.graph_container.graph(); match graph.get(specifier) { Some(deno_graph::Module::Json(JsonModule { source, @@ -331,9 +360,11 @@ impl CliModuleLoader { | MediaType::Jsx | MediaType::Tsx => { // get emit text - self - .emitter - .emit_parsed_source(specifier, *media_type, source)? + self.shared.emitter.emit_parsed_source( + specifier, + *media_type, + source, + )? } MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") @@ -341,7 +372,7 @@ impl CliModuleLoader { }; // at this point, we no longer need the parsed source in memory, so free it - self.parsed_source_cache.free(specifier); + self.shared.parsed_source_cache.free(specifier); Ok(ModuleCodeSource { code, @@ -370,15 +401,17 @@ impl CliModuleLoader { } else { &self.root_permissions }; - let code_source = if let Some(code_source) = self - .npm_module_loader - .load_sync(specifier, maybe_referrer, permissions)? - { + let code_source = if let Some(code_source) = + self.shared.npm_module_loader.load_sync( + specifier, + maybe_referrer, + permissions, + )? { code_source } else { self.load_prepared_module(specifier, maybe_referrer)? 
}; - let code = if self.cli_options.is_inspecting() { + let code = if self.shared.is_inspecting { // we need the code with the source map in order for // it to work with --inspect or --inspect-brk code_source.code @@ -418,15 +451,15 @@ impl ModuleLoader for CliModuleLoader { let referrer_result = deno_core::resolve_url_or_path(referrer, &cwd); if let Ok(referrer) = referrer_result.as_ref() { - if let Some(result) = self.npm_module_loader.resolve_if_in_npm_package( - specifier, - referrer, - permissions, - ) { + if let Some(result) = self + .shared + .npm_module_loader + .resolve_if_in_npm_package(specifier, referrer, permissions) + { return result; } - let graph = self.graph_container.graph(); + let graph = self.shared.graph_container.graph(); let maybe_resolved = match graph.get(referrer) { Some(Module::Esm(module)) => { module.dependencies.get(specifier).map(|d| &d.maybe_code) @@ -440,6 +473,7 @@ impl ModuleLoader for CliModuleLoader { return match graph.get(specifier) { Some(Module::Npm(module)) => self + .shared .npm_module_loader .resolve_npm_module(module, permissions), Some(Module::Node(module)) => { @@ -471,9 +505,7 @@ impl ModuleLoader for CliModuleLoader { // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL // and `Deno.core.evalContext` API. Ideally we should always have a referrer filled // but sadly that's not the case due to missing APIs in V8. - let is_repl = - matches!(self.cli_options.sub_command(), DenoSubcommand::Repl(_)); - let referrer = if referrer.is_empty() && is_repl { + let referrer = if referrer.is_empty() && self.shared.is_repl { deno_core::resolve_path("./$deno$repl.ts", &cwd)? } else { referrer_result? @@ -481,9 +513,9 @@ impl ModuleLoader for CliModuleLoader { // FIXME(bartlomieju): this is another hack way to provide NPM specifier // support in REPL. This should be fixed. 
- let resolution = self.resolver.resolve(specifier, &referrer); + let resolution = self.shared.resolver.resolve(specifier, &referrer); - if is_repl { + if self.shared.is_repl { let specifier = resolution .as_ref() .ok() @@ -494,6 +526,7 @@ impl ModuleLoader for CliModuleLoader { NpmPackageReqReference::from_specifier(&specifier) { return self + .shared .npm_module_loader .resolve_for_repl(&reference, permissions); } @@ -526,12 +559,14 @@ impl ModuleLoader for CliModuleLoader { _maybe_referrer: Option, is_dynamic: bool, ) -> Pin>>> { - if let Some(result) = self.npm_module_loader.maybe_prepare_load(specifier) { + if let Some(result) = + self.shared.npm_module_loader.maybe_prepare_load(specifier) + { return Box::pin(deno_core::futures::future::ready(result)); } let specifier = specifier.clone(); - let module_load_preparer = self.module_load_preparer.clone(); + let module_load_preparer = self.shared.module_load_preparer.clone(); let root_permissions = if is_dynamic { self.dynamic_permissions.clone() @@ -567,7 +602,7 @@ impl SourceMapGetter for CliModuleLoader { file_name: &str, line_number: usize, ) -> Option { - let graph = self.graph_container.graph(); + let graph = self.shared.graph_container.graph(); let code = match graph.get(&resolve_url(file_name).ok()?) 
{ Some(deno_graph::Module::Esm(module)) => &module.source, Some(deno_graph::Module::Json(module)) => &module.source, diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 950e198242..bb8fd9c3e7 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -4,6 +4,7 @@ use crate::args::CliOptions; use crate::args::DenoSubcommand; use crate::args::Flags; use crate::args::Lockfile; +use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; use crate::cache::DenoDir; @@ -16,7 +17,9 @@ use crate::file_fetcher::FileFetcher; use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::http_util::HttpClient; +use crate::module_loader::CliModuleLoaderFactory; use crate::module_loader::ModuleLoadPreparer; +use crate::module_loader::NpmModuleLoader; use crate::node::CliCjsEsmCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::npm::create_npm_fs_resolver; @@ -29,20 +32,20 @@ use crate::resolver::CliGraphResolver; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +use crate::worker::CliMainWorkerFactory; +use crate::worker::CliMainWorkerOptions; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; -use deno_core::CompiledWasmModuleStore; use deno_core::ModuleSpecifier; -use deno_core::SharedArrayBufferStore; -use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; +use deno_semver::npm::NpmPackageReqReference; use import_map::ImportMap; use log::warn; use std::collections::HashSet; @@ -70,9 +73,6 @@ pub struct Inner { pub maybe_inspector_server: Option>, pub root_cert_store: RootCertStore, pub blob_store: BlobStore, - 
pub broadcast_channel: InMemoryBroadcastChannel, - pub shared_array_buffer_store: SharedArrayBufferStore, - pub compiled_wasm_module_store: CompiledWasmModuleStore, pub parsed_source_cache: Arc, pub resolver: Arc, maybe_file_watcher_reporter: Option, @@ -142,9 +142,6 @@ impl ProcState { maybe_inspector_server: self.maybe_inspector_server.clone(), root_cert_store: self.root_cert_store.clone(), blob_store: self.blob_store.clone(), - broadcast_channel: Default::default(), - shared_array_buffer_store: Default::default(), - compiled_wasm_module_store: Default::default(), parsed_source_cache: self.parsed_source_cache.clone(), resolver: self.resolver.clone(), maybe_file_watcher_reporter: self.maybe_file_watcher_reporter.clone(), @@ -203,9 +200,6 @@ impl ProcState { _ => {} } let blob_store = BlobStore::default(); - let broadcast_channel = InMemoryBroadcastChannel::default(); - let shared_array_buffer_store = SharedArrayBufferStore::default(); - let compiled_wasm_module_store = CompiledWasmModuleStore::default(); let deps_cache_location = dir.deps_folder_path(); let http_cache = HttpCache::new(&deps_cache_location); let root_cert_store = cli_options.resolve_root_cert_store()?; @@ -358,9 +352,6 @@ impl ProcState { maybe_inspector_server, root_cert_store, blob_store, - broadcast_channel, - shared_array_buffer_store, - compiled_wasm_module_store, parsed_source_cache, resolver, maybe_file_watcher_reporter, @@ -378,6 +369,73 @@ impl ProcState { progress_bar, }))) } + + // todo(dsherret): this is a transitory method as we separate out + // ProcState from more code + pub fn into_cli_main_worker_factory(self) -> CliMainWorkerFactory { + CliMainWorkerFactory::new( + StorageKeyResolver::from_options(&self.options), + self.npm_resolver.clone(), + self.node_resolver.clone(), + self.graph_container.clone(), + self.blob_store.clone(), + CliModuleLoaderFactory::new( + &self.options, + self.emitter.clone(), + self.graph_container.clone(), + self.module_load_preparer.clone(), + 
self.parsed_source_cache.clone(), + self.resolver.clone(), + NpmModuleLoader::new( + self.cjs_resolutions.clone(), + self.node_code_translator.clone(), + self.node_resolver.clone(), + ), + ), + self.root_cert_store.clone(), + self.node_fs.clone(), + self.maybe_inspector_server.clone(), + CliMainWorkerOptions { + argv: self.options.argv().clone(), + debug: self + .options + .log_level() + .map(|l| l == log::Level::Debug) + .unwrap_or(false), + coverage_dir: self.options.coverage_dir(), + enable_testing_features: self.options.enable_testing_features(), + has_node_modules_dir: self.options.has_node_modules_dir(), + inspect_brk: self.options.inspect_brk().is_some(), + inspect_wait: self.options.inspect_wait().is_some(), + is_inspecting: self.options.is_inspecting(), + is_npm_main: self.options.is_npm_main(), + location: self.options.location_flag().clone(), + maybe_binary_npm_command_name: { + let mut maybe_binary_command_name = None; + if let DenoSubcommand::Run(flags) = self.options.sub_command() { + if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) + { + // if the user ran a binary command, we'll need to set process.argv[0] + // to be the name of the binary command instead of deno + let binary_name = pkg_ref + .sub_path + .as_deref() + .unwrap_or(pkg_ref.req.name.as_str()); + maybe_binary_command_name = Some(binary_name.to_string()); + } + } + maybe_binary_command_name + }, + origin_data_folder_path: self.dir.origin_data_folder_path(), + seed: self.options.seed(), + unsafely_ignore_certificate_errors: self + .options + .unsafely_ignore_certificate_errors() + .clone(), + unstable: self.options.unstable(), + }, + ) + } } /// Keeps track of what module specifiers were resolved as CJS. 
diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 5f467bc6e2..88e19dd701 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -15,7 +15,7 @@ use crate::util::file_watcher::ResolutionResult; use crate::util::fs::collect_specifiers; use crate::util::path::is_supported_ext; use crate::version::get_user_agent; -use crate::worker::create_custom_worker; +use crate::worker::CliMainWorkerFactory; use deno_core::error::generic_error; use deno_core::error::AnyError; @@ -48,6 +48,7 @@ use tokio::sync::mpsc::UnboundedSender; struct BenchSpecifierOptions { filter: TestFilter, json: bool, + log_level: Option, } #[derive(Debug, Clone, Eq, PartialEq, Deserialize)] @@ -434,20 +435,20 @@ async fn check_specifiers( /// Run a single specifier as an executable bench module. async fn bench_specifier( - ps: ProcState, + worker_factory: &CliMainWorkerFactory, permissions: Permissions, specifier: ModuleSpecifier, sender: UnboundedSender, filter: TestFilter, ) -> Result<(), AnyError> { - let mut worker = create_custom_worker( - &ps, - specifier.clone(), - PermissionsContainer::new(permissions), - vec![ops::bench::deno_bench::init_ops(sender.clone())], - Default::default(), - ) - .await?; + let mut worker = worker_factory + .create_custom_worker( + specifier.clone(), + PermissionsContainer::new(permissions), + vec![ops::bench::deno_bench::init_ops(sender.clone())], + Default::default(), + ) + .await?; // We execute the main module as a side module so that import.meta.main is not set. worker.execute_side_module_possibly_with_npm().await?; @@ -508,26 +509,29 @@ async fn bench_specifier( /// Test a collection of specifiers with test modes concurrently. 
async fn bench_specifiers( - ps: &ProcState, + worker_factory: Arc, permissions: &Permissions, specifiers: Vec, options: BenchSpecifierOptions, ) -> Result<(), AnyError> { - let log_level = ps.options.log_level(); - let (sender, mut receiver) = unbounded_channel::(); - + let log_level = options.log_level; let option_for_handles = options.clone(); let join_handles = specifiers.into_iter().map(move |specifier| { - let ps = ps.clone(); + let worker_factory = worker_factory.clone(); let permissions = permissions.clone(); let specifier = specifier; let sender = sender.clone(); let options = option_for_handles.clone(); tokio::task::spawn_blocking(move || { - let future = - bench_specifier(ps, permissions, specifier, sender, options.filter); + let future = bench_specifier( + &worker_factory, + permissions, + specifier, + sender, + options.filter, + ); run_local(future) }) }); @@ -650,13 +654,16 @@ pub async fn run_benchmarks( return Ok(()); } + let log_level = ps.options.log_level(); + let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); bench_specifiers( - &ps, + worker_factory, &permissions, specifiers, BenchSpecifierOptions { filter: TestFilter::from_flag(&bench_options.filter), json: bench_options.json, + log_level, }, ) .await?; @@ -809,13 +816,16 @@ pub async fn run_benchmarks_with_watch( return Ok(()); } + let log_level = ps.options.log_level(); + let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); bench_specifiers( - &ps, + worker_factory, permissions, specifiers, BenchSpecifierOptions { filter: TestFilter::from_flag(&bench_options.filter), json: bench_options.json, + log_level, }, ) .await?; diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index a6cc716373..bfba627525 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -1,10 +1,11 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use crate::args::CliOptions; use crate::args::Flags; use crate::args::ReplFlags; use crate::colors; +use crate::file_fetcher::FileFetcher; use crate::proc_state::ProcState; -use crate::worker::create_main_worker; use deno_core::error::AnyError; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; @@ -65,14 +66,14 @@ async fn read_line_and_poll( } async fn read_eval_file( - ps: &ProcState, + cli_options: &CliOptions, + file_fetcher: &FileFetcher, eval_file: &str, ) -> Result { let specifier = - deno_core::resolve_url_or_path(eval_file, ps.options.initial_cwd())?; + deno_core::resolve_url_or_path(eval_file, cli_options.initial_cwd())?; - let file = ps - .file_fetcher + let file = file_fetcher .fetch(&specifier, PermissionsContainer::allow_all()) .await?; @@ -82,17 +83,24 @@ async fn read_eval_file( pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { let ps = ProcState::from_flags(flags).await?; let main_module = ps.options.resolve_main_module()?; - let mut worker = create_main_worker( - &ps, - main_module, - PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?), - ) - .await?; + let permissions = PermissionsContainer::new(Permissions::from_options( + &ps.options.permissions_options(), + )?); + let cli_options = ps.options.clone(); + let npm_resolver = ps.npm_resolver.clone(); + let resolver = ps.resolver.clone(); + let dir = ps.dir.clone(); + let file_fetcher = ps.file_fetcher.clone(); + let worker_factory = ps.into_cli_main_worker_factory(); + + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; worker.setup_repl().await?; let worker = worker.into_main_worker(); - let mut repl_session = ReplSession::initialize(ps.clone(), worker).await?; + let mut repl_session = + ReplSession::initialize(&cli_options, npm_resolver, resolver, worker) + .await?; let mut rustyline_channel = rustyline_channel(); let helper = EditorHelper { @@ 
-100,12 +108,12 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { sync_sender: rustyline_channel.0, }; - let history_file_path = ps.dir.repl_history_file_path(); + let history_file_path = dir.repl_history_file_path(); let editor = ReplEditor::new(helper, history_file_path)?; if let Some(eval_files) = repl_flags.eval_files { for eval_file in eval_files { - match read_eval_file(&ps, &eval_file).await { + match read_eval_file(&cli_options, &file_fetcher, &eval_file).await { Ok(eval_source) => { let output = repl_session .evaluate_line_and_get_output(&eval_source) @@ -132,7 +140,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { // Doing this manually, instead of using `log::info!` because these messages // are supposed to go to stdout, not stderr. - if !ps.options.is_quiet() { + if !cli_options.is_quiet() { println!("Deno {}", crate::version::deno()); println!("exit using ctrl+d, ctrl+c, or close()"); if repl_flags.is_default_command { diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 7fc251362e..b2645097c4 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -1,8 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::sync::Arc; + +use crate::args::CliOptions; use crate::colors; use crate::lsp::ReplLanguageServer; -use crate::ProcState; +use crate::npm::CliNpmResolver; +use crate::resolver::CliGraphResolver; use deno_ast::swc::ast as swc_ast; use deno_ast::swc::visit::noop_visit_type; @@ -117,7 +121,9 @@ struct TsEvaluateResponse { } pub struct ReplSession { - proc_state: ProcState, + has_node_modules_dir: bool, + npm_resolver: Arc, + resolver: Arc, pub worker: MainWorker, session: LocalInspectorSession, pub context_id: u64, @@ -132,7 +138,9 @@ pub struct ReplSession { impl ReplSession { pub async fn initialize( - proc_state: ProcState, + cli_options: &CliOptions, + npm_resolver: Arc, + resolver: Arc, mut worker: MainWorker, ) -> Result { let language_server = ReplLanguageServer::new_initialized().await?; @@ -171,14 +179,14 @@ impl ReplSession { } assert_ne!(context_id, 0); - let referrer = deno_core::resolve_path( - "./$deno$repl.ts", - proc_state.options.initial_cwd(), - ) - .unwrap(); + let referrer = + deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd()) + .unwrap(); let mut repl_session = ReplSession { - proc_state, + has_node_modules_dir: cli_options.has_node_modules_dir(), + npm_resolver, + resolver, worker, session, context_id, @@ -487,7 +495,6 @@ impl ReplSession { .iter() .flat_map(|i| { self - .proc_state .resolver .resolve(i, &self.referrer) .ok() @@ -506,22 +513,17 @@ impl ReplSession { if !self.has_initialized_node_runtime { deno_node::initialize_runtime( &mut self.worker.js_runtime, - self.proc_state.options.has_node_modules_dir(), + self.has_node_modules_dir, None, )?; self.has_initialized_node_runtime = true; } - self - .proc_state - .npm_resolver - .add_package_reqs(npm_imports) - .await?; + self.npm_resolver.add_package_reqs(npm_imports).await?; // prevent messages in the repl about @types/node not being cached if has_node_specifier { self - .proc_state .npm_resolver .inject_synthetic_types_node_package() .await?; diff --git 
a/cli/tools/run.rs b/cli/tools/run.rs index 7f4b5c8f74..6515ebde69 100644 --- a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -13,7 +13,6 @@ use crate::args::Flags; use crate::file_fetcher::File; use crate::proc_state::ProcState; use crate::util; -use crate::worker::create_main_worker; pub async fn run_script(flags: Flags) -> Result { if !flags.has_permission() && flags.has_permission_in_argv() { @@ -48,7 +47,10 @@ To grant permissions, set them before the script argument. For example: let permissions = PermissionsContainer::new(Permissions::from_options( &ps.options.permissions_options(), )?); - let mut worker = create_main_worker(&ps, main_module, permissions).await?; + let worker_factory = ps.into_cli_main_worker_factory(); + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) @@ -58,15 +60,9 @@ pub async fn run_from_stdin(flags: Flags) -> Result { let ps = ProcState::from_flags(flags).await?; let main_module = ps.options.resolve_main_module()?; - let mut worker = create_main_worker( - &ps, - main_module.clone(), - PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?), - ) - .await?; - + let permissions = PermissionsContainer::new(Permissions::from_options( + &ps.options.permissions_options(), + )?); let mut source = Vec::new(); std::io::stdin().read_to_end(&mut source)?; // Create a dummy source file. 
@@ -75,13 +71,17 @@ pub async fn run_from_stdin(flags: Flags) -> Result { maybe_types: None, media_type: MediaType::TypeScript, source: String::from_utf8(source)?.into(), - specifier: main_module, + specifier: main_module.clone(), maybe_headers: None, }; // Save our fake file into file fetcher cache // to allow module access by TS compiler ps.file_fetcher.insert_cached(source_file); + let worker_factory = ps.into_cli_main_worker_factory(); + let mut worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) } @@ -102,7 +102,10 @@ async fn run_with_watch(flags: Flags) -> Result { let permissions = PermissionsContainer::new(Permissions::from_options( &ps.options.permissions_options(), )?); - let worker = create_main_worker(&ps, main_module, permissions).await?; + let worker_factory = ps.into_cli_main_worker_factory(); + let worker = worker_factory + .create_main_worker(main_module, permissions) + .await?; worker.run_for_watcher().await?; Ok(()) @@ -132,8 +135,6 @@ pub async fn eval_command( let permissions = PermissionsContainer::new(Permissions::from_options( &ps.options.permissions_options(), )?); - let mut worker = - create_main_worker(&ps, main_module.clone(), permissions).await?; // Create a dummy source file. let source_code = if eval_flags.print { format!("console.log({})", eval_flags.code) @@ -147,13 +148,18 @@ pub async fn eval_command( maybe_types: None, media_type: MediaType::Unknown, source: String::from_utf8(source_code)?.into(), - specifier: main_module, + specifier: main_module.clone(), maybe_headers: None, }; // Save our fake file into file fetcher cache // to allow module access by TS compiler. 
ps.file_fetcher.insert_cached(file); + + let mut worker = ps + .into_cli_main_worker_factory() + .create_main_worker(main_module, permissions) + .await?; let exit_code = worker.run().await?; Ok(exit_code) } diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 62a104733d..3bc0e79aab 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -17,7 +17,7 @@ use crate::util::fs::collect_specifiers; use crate::util::path::get_extension; use crate::util::path::is_supported_ext; use crate::util::path::mapped_specifier_for_tsc; -use crate::worker::create_custom_worker; +use crate::worker::CliMainWorkerFactory; use deno_ast::swc::common::comments::CommentKind; use deno_ast::MediaType; @@ -336,10 +336,18 @@ pub struct TestSummary { } #[derive(Debug, Clone)] -struct TestSpecifierOptions { +struct TestSpecifiersOptions { concurrent_jobs: NonZeroUsize, fail_fast: Option, - filter: TestFilter, + log_level: Option, + specifier: TestSpecifierOptions, +} + +#[derive(Debug, Clone)] +pub struct TestSpecifierOptions { + pub shuffle: Option, + pub filter: TestFilter, + pub trace_ops: bool, } impl TestSummary { @@ -907,30 +915,30 @@ pub fn format_test_error(js_error: &JsError) -> String { /// Test a single specifier as documentation containing test programs, an executable test module or /// both. 
pub async fn test_specifier( - ps: &ProcState, + worker_factory: &CliMainWorkerFactory, permissions: Permissions, specifier: ModuleSpecifier, mut sender: TestEventSender, fail_fast_tracker: FailFastTracker, - filter: TestFilter, + options: &TestSpecifierOptions, ) -> Result<(), AnyError> { if fail_fast_tracker.should_stop() { return Ok(()); } let stdout = StdioPipe::File(sender.stdout()); let stderr = StdioPipe::File(sender.stderr()); - let mut worker = create_custom_worker( - ps, - specifier.clone(), - PermissionsContainer::new(permissions), - vec![ops::testing::deno_test::init_ops(sender.clone())], - Stdio { - stdin: StdioPipe::Inherit, - stdout, - stderr, - }, - ) - .await?; + let mut worker = worker_factory + .create_custom_worker( + specifier.clone(), + PermissionsContainer::new(permissions), + vec![ops::testing::deno_test::init_ops(sender.clone())], + Stdio { + stdin: StdioPipe::Inherit, + stdout, + stderr, + }, + ) + .await?; let mut coverage_collector = worker.maybe_setup_coverage_collector().await?; @@ -951,7 +959,7 @@ pub async fn test_specifier( } let mut worker = worker.into_main_worker(); - if ps.options.trace_ops() { + if options.trace_ops { worker.js_runtime.execute_script_static( located_script_name!(), "Deno[Deno.internal].enableOpCallTracing();", @@ -971,9 +979,9 @@ pub async fn test_specifier( let tests = if used_only { only } else { no_only }; let mut tests = tests .into_iter() - .filter(|(d, _)| filter.includes(&d.name)) + .filter(|(d, _)| options.filter.includes(&d.name)) .collect::>(); - if let Some(seed) = ps.options.shuffle_tests() { + if let Some(seed) = options.shuffle { tests.shuffle(&mut SmallRng::seed_from_u64(seed)); } sender.send(TestEvent::Plan(TestPlan { @@ -1288,13 +1296,12 @@ static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false); /// Test a collection of specifiers with test modes concurrently. 
async fn test_specifiers( - ps: &ProcState, + worker_factory: Arc, permissions: &Permissions, specifiers: Vec, - options: TestSpecifierOptions, + options: TestSpecifiersOptions, ) -> Result<(), AnyError> { - let log_level = ps.options.log_level(); - let specifiers = if let Some(seed) = ps.options.shuffle_tests() { + let specifiers = if let Some(seed) = options.specifier.shuffle { let mut rng = SmallRng::seed_from_u64(seed); let mut specifiers = specifiers; specifiers.sort(); @@ -1316,19 +1323,19 @@ async fn test_specifiers( HAS_TEST_RUN_SIGINT_HANDLER.store(true, Ordering::Relaxed); let join_handles = specifiers.into_iter().map(move |specifier| { - let ps = ps.clone(); + let worker_factory = worker_factory.clone(); let permissions = permissions.clone(); let sender = sender.clone(); - let options = options.clone(); let fail_fast_tracker = FailFastTracker::new(options.fail_fast); + let specifier_options = options.specifier.clone(); tokio::task::spawn_blocking(move || { run_local(test_specifier( - &ps, + &worker_factory, permissions, specifier, sender.clone(), fail_fast_tracker, - options.filter, + &specifier_options, )) }) }); @@ -1339,7 +1346,7 @@ async fn test_specifiers( let mut reporter = Box::new(PrettyTestReporter::new( concurrent_jobs.get() > 1, - log_level != Some(Level::Error), + options.log_level != Some(Level::Error), )); let handler = { @@ -1626,6 +1633,7 @@ pub async fn run_tests( // file would have impact on other files, which is undesirable. 
let permissions = Permissions::from_options(&ps.options.permissions_options())?; + let log_level = ps.options.log_level(); let specifiers_with_mode = fetch_specifiers_with_test_mode( &ps, @@ -1644,8 +1652,10 @@ pub async fn run_tests( return Ok(()); } + let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + test_specifiers( - &ps, + worker_factory, &permissions, specifiers_with_mode .into_iter() @@ -1654,10 +1664,15 @@ pub async fn run_tests( _ => Some(s), }) .collect(), - TestSpecifierOptions { + TestSpecifiersOptions { concurrent_jobs: test_options.concurrent_jobs, fail_fast: test_options.fail_fast, - filter: TestFilter::from_flag(&test_options.filter), + log_level, + specifier: TestSpecifierOptions { + filter: TestFilter::from_flag(&test_options.filter), + shuffle: test_options.shuffle, + trace_ops: test_options.trace_ops, + }, }, ) .await?; @@ -1676,6 +1691,7 @@ pub async fn run_tests_with_watch( let permissions = Permissions::from_options(&ps.options.permissions_options())?; let no_check = ps.options.type_check_mode() == TypeCheckMode::None; + let log_level = ps.options.log_level(); let ps = RefCell::new(ps); @@ -1816,8 +1832,10 @@ pub async fn run_tests_with_watch( return Ok(()); } + let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + test_specifiers( - &ps, + worker_factory, permissions, specifiers_with_mode .into_iter() @@ -1826,10 +1844,15 @@ pub async fn run_tests_with_watch( _ => Some(s), }) .collect(), - TestSpecifierOptions { + TestSpecifiersOptions { concurrent_jobs: test_options.concurrent_jobs, fail_fast: test_options.fail_fast, - filter: TestFilter::from_flag(&test_options.filter), + log_level, + specifier: TestSpecifierOptions { + filter: TestFilter::from_flag(&test_options.filter), + shuffle: test_options.shuffle, + trace_ops: test_options.trace_ops, + }, }, ) .await?; diff --git a/cli/worker.rs b/cli/worker.rs index e565789ede..3dad2fbe14 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -1,6 +1,7 @@ // Copyright 
2018-2023 the Deno authors. All rights reserved. MIT license. use std::path::PathBuf; +use std::rc::Rc; use std::sync::Arc; use deno_ast::ModuleSpecifier; @@ -8,12 +9,20 @@ use deno_core::error::AnyError; use deno_core::futures::task::LocalFutureObj; use deno_core::futures::FutureExt; use deno_core::located_script_name; +use deno_core::url::Url; +use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::ModuleId; +use deno_core::SharedArrayBufferStore; use deno_runtime::colors; +use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; +use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; +use deno_runtime::inspector_server::InspectorServer; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::ops::worker_host::WorkerEventCb; use deno_runtime::permissions::PermissionsContainer; @@ -24,21 +33,56 @@ use deno_runtime::worker::WorkerOptions; use deno_runtime::BootstrapOptions; use deno_semver::npm::NpmPackageReqReference; -use crate::args::DenoSubcommand; +use crate::args::StorageKeyResolver; use crate::errors; -use crate::module_loader::CliModuleLoader; +use crate::graph_util::ModuleGraphContainer; +use crate::module_loader::CliModuleLoaderFactory; +use crate::npm::CliNpmResolver; use crate::ops; -use crate::proc_state::ProcState; use crate::tools; use crate::tools::coverage::CoverageCollector; use crate::util::checksum; use crate::version; +pub struct CliMainWorkerOptions { + pub argv: Vec, + pub debug: bool, + pub coverage_dir: Option, + pub enable_testing_features: bool, + pub has_node_modules_dir: bool, + pub inspect_brk: bool, + pub inspect_wait: bool, + pub is_inspecting: bool, + pub is_npm_main: bool, + pub location: Option, + pub maybe_binary_npm_command_name: Option, + pub origin_data_folder_path: PathBuf, + 
pub seed: Option, + pub unsafely_ignore_certificate_errors: Option>, + pub unstable: bool, +} + +struct SharedWorkerState { + pub options: CliMainWorkerOptions, + pub storage_key_resolver: StorageKeyResolver, + pub npm_resolver: Arc, + pub node_resolver: Arc, + pub graph_container: Arc, + pub blob_store: BlobStore, + pub broadcast_channel: InMemoryBroadcastChannel, + pub shared_array_buffer_store: SharedArrayBufferStore, + pub compiled_wasm_module_store: CompiledWasmModuleStore, + pub module_loader_factory: CliModuleLoaderFactory, + pub root_cert_store: RootCertStore, + pub node_fs: Arc, + pub maybe_inspector_server: Option>, +} + pub struct CliMainWorker { main_module: ModuleSpecifier, is_main_cjs: bool, worker: MainWorker, - ps: ProcState, + shared: Arc, } impl CliMainWorker { @@ -62,7 +106,7 @@ impl CliMainWorker { &mut self.worker.js_runtime, &self.main_module.to_file_path().unwrap().to_string_lossy(), true, - self.ps.options.inspect_brk().is_some(), + self.shared.options.inspect_brk, )?; } else { self.execute_main_module_possibly_with_npm().await?; @@ -183,8 +227,8 @@ impl CliMainWorker { &mut self, id: ModuleId, ) -> Result<(), AnyError> { - if self.ps.npm_resolver.has_packages() - || self.ps.graph_container.graph().has_node_specifier + if self.shared.npm_resolver.has_packages() + || self.shared.graph_container.graph().has_node_specifier { self.initialize_main_module_for_node()?; } @@ -192,24 +236,10 @@ impl CliMainWorker { } fn initialize_main_module_for_node(&mut self) -> Result<(), AnyError> { - let mut maybe_binary_command_name = None; - - if let DenoSubcommand::Run(flags) = self.ps.options.sub_command() { - if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) { - // if the user ran a binary command, we'll need to set process.argv[0] - // to be the name of the binary command instead of deno - let binary_name = pkg_ref - .sub_path - .as_deref() - .unwrap_or(pkg_ref.req.name.as_str()); - maybe_binary_command_name = 
Some(binary_name.to_string()); - } - } - deno_node::initialize_runtime( &mut self.worker.js_runtime, - self.ps.options.has_node_modules_dir(), - maybe_binary_command_name, + self.shared.options.has_node_modules_dir, + self.shared.options.maybe_binary_npm_command_name.as_deref(), )?; Ok(()) @@ -218,7 +248,7 @@ impl CliMainWorker { pub async fn maybe_setup_coverage_collector( &mut self, ) -> Result, AnyError> { - if let Some(ref coverage_dir) = self.ps.options.coverage_dir() { + if let Some(coverage_dir) = &self.shared.options.coverage_dir { let session = self.worker.create_inspector_session().await; let coverage_dir = PathBuf::from(coverage_dir); @@ -235,142 +265,188 @@ impl CliMainWorker { } } -pub async fn create_main_worker( - ps: &ProcState, - main_module: ModuleSpecifier, - permissions: PermissionsContainer, -) -> Result { - create_custom_worker(ps, main_module, permissions, vec![], Default::default()) - .await +pub struct CliMainWorkerFactory { + shared: Arc, } -pub async fn create_custom_worker( - ps: &ProcState, - main_module: ModuleSpecifier, - permissions: PermissionsContainer, - mut custom_extensions: Vec, - stdio: deno_runtime::deno_io::Stdio, -) -> Result { - let (main_module, is_main_cjs) = if let Ok(package_ref) = - NpmPackageReqReference::from_specifier(&main_module) - { - ps.npm_resolver - .add_package_reqs(vec![package_ref.req.clone()]) - .await?; - let node_resolution = - ps.node_resolver.resolve_binary_export(&package_ref)?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) - } else if ps.options.is_npm_main() { - let node_resolution = - ps.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) - } else { - (main_module, false) - }; +impl CliMainWorkerFactory { + #[allow(clippy::too_many_arguments)] + pub fn new( + storage_key_resolver: StorageKeyResolver, + 
npm_resolver: Arc, + node_resolver: Arc, + graph_container: Arc, + blob_store: BlobStore, + module_loader_factory: CliModuleLoaderFactory, + root_cert_store: RootCertStore, + node_fs: Arc, + maybe_inspector_server: Option>, + options: CliMainWorkerOptions, + ) -> Self { + Self { + shared: Arc::new(SharedWorkerState { + options, + storage_key_resolver, + npm_resolver, + node_resolver, + graph_container, + blob_store, + broadcast_channel: Default::default(), + shared_array_buffer_store: Default::default(), + compiled_wasm_module_store: Default::default(), + module_loader_factory, + root_cert_store, + node_fs, + maybe_inspector_server, + }), + } + } - let module_loader = CliModuleLoader::new( - ps.clone(), - PermissionsContainer::allow_all(), - permissions.clone(), - ); + pub async fn create_main_worker( + &self, + main_module: ModuleSpecifier, + permissions: PermissionsContainer, + ) -> Result { + self + .create_custom_worker( + main_module, + permissions, + vec![], + Default::default(), + ) + .await + } - let maybe_inspector_server = ps.maybe_inspector_server.clone(); + pub async fn create_custom_worker( + &self, + main_module: ModuleSpecifier, + permissions: PermissionsContainer, + mut custom_extensions: Vec, + stdio: deno_runtime::deno_io::Stdio, + ) -> Result { + let shared = &self.shared; + let (main_module, is_main_cjs) = if let Ok(package_ref) = + NpmPackageReqReference::from_specifier(&main_module) + { + shared + .npm_resolver + .add_package_reqs(vec![package_ref.req.clone()]) + .await?; + let node_resolution = + shared.node_resolver.resolve_binary_export(&package_ref)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); + (node_resolution.into_url(), is_main_cjs) + } else if shared.options.is_npm_main { + let node_resolution = + shared.node_resolver.url_to_node_resolution(main_module)?; + let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); + (node_resolution.into_url(), is_main_cjs) + } else { + (main_module, 
false) + }; - let create_web_worker_cb = - create_web_worker_callback(ps.clone(), stdio.clone()); - let web_worker_preload_module_cb = - create_web_worker_preload_module_callback(ps.clone()); - let web_worker_pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(ps.clone()); + let module_loader = + Rc::new(shared.module_loader_factory.create_for_main( + PermissionsContainer::allow_all(), + permissions.clone(), + )); + let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let maybe_storage_key = ps.options.resolve_storage_key(&main_module); - let origin_storage_dir = maybe_storage_key.as_ref().map(|key| { - ps.dir - .origin_data_folder_path() - .join(checksum::gen(&[key.as_bytes()])) - }); - let cache_storage_dir = maybe_storage_key.map(|key| { - // TODO(@satyarohith): storage quota management - // Note: we currently use temp_dir() to avoid managing storage size. - std::env::temp_dir() - .join("deno_cache") - .join(checksum::gen(&[key.as_bytes()])) - }); + let create_web_worker_cb = + create_web_worker_callback(shared.clone(), stdio.clone()); + let web_worker_preload_module_cb = + create_web_worker_preload_module_callback(shared); + let web_worker_pre_execute_module_cb = + create_web_worker_pre_execute_module_callback(shared.clone()); - let mut extensions = ops::cli_exts(ps.npm_resolver.clone()); - extensions.append(&mut custom_extensions); - - let options = WorkerOptions { - bootstrap: BootstrapOptions { - args: ps.options.argv().clone(), - cpu_count: std::thread::available_parallelism() - .map(|p| p.get()) - .unwrap_or(1), - debug_flag: ps + let maybe_storage_key = shared + .storage_key_resolver + .resolve_storage_key(&main_module); + let origin_storage_dir = maybe_storage_key.as_ref().map(|key| { + shared .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - enable_testing_features: ps.options.enable_testing_features(), - locale: deno_core::v8::icu::get_language_tag(), - location: 
ps.options.location_flag().clone(), - no_color: !colors::use_color(), - is_tty: colors::is_tty(), - runtime_version: version::deno().to_string(), - ts_version: version::TYPESCRIPT.to_string(), - unstable: ps.options.unstable(), - user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), - }, - extensions, - startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: ps - .options - .unsafely_ignore_certificate_errors() - .clone(), - root_cert_store: Some(ps.root_cert_store.clone()), - seed: ps.options.seed(), - source_map_getter: Some(Box::new(module_loader.clone())), - format_js_error_fn: Some(Arc::new(format_js_error)), - create_web_worker_cb, - web_worker_preload_module_cb, - web_worker_pre_execute_module_cb, - maybe_inspector_server, - should_break_on_first_statement: ps.options.inspect_brk().is_some(), - should_wait_for_inspector_session: ps.options.inspect_wait().is_some(), - module_loader, - node_fs: Some(ps.node_fs.clone()), - npm_resolver: Some(ps.npm_resolver.clone()), - get_error_class_fn: Some(&errors::get_error_class_name), - cache_storage_dir, - origin_storage_dir, - blob_store: ps.blob_store.clone(), - broadcast_channel: ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()), - stdio, - }; + .origin_data_folder_path + .join(checksum::gen(&[key.as_bytes()])) + }); + let cache_storage_dir = maybe_storage_key.map(|key| { + // TODO(@satyarohith): storage quota management + // Note: we currently use temp_dir() to avoid managing storage size. 
+ std::env::temp_dir() + .join("deno_cache") + .join(checksum::gen(&[key.as_bytes()])) + }); - let worker = MainWorker::bootstrap_from_options( - main_module.clone(), - permissions, - options, - ); + let mut extensions = ops::cli_exts(shared.npm_resolver.clone()); + extensions.append(&mut custom_extensions); - Ok(CliMainWorker { - main_module, - is_main_cjs, - worker, - ps: ps.clone(), - }) + let options = WorkerOptions { + bootstrap: BootstrapOptions { + args: shared.options.argv.clone(), + cpu_count: std::thread::available_parallelism() + .map(|p| p.get()) + .unwrap_or(1), + debug_flag: shared.options.debug, + enable_testing_features: shared.options.enable_testing_features, + locale: deno_core::v8::icu::get_language_tag(), + location: shared.options.location.clone(), + no_color: !colors::use_color(), + is_tty: colors::is_tty(), + runtime_version: version::deno().to_string(), + ts_version: version::TYPESCRIPT.to_string(), + unstable: shared.options.unstable, + user_agent: version::get_user_agent().to_string(), + inspect: shared.options.is_inspecting, + }, + extensions, + startup_snapshot: Some(crate::js::deno_isolate_init()), + unsafely_ignore_certificate_errors: shared + .options + .unsafely_ignore_certificate_errors + .clone(), + root_cert_store: Some(shared.root_cert_store.clone()), + seed: shared.options.seed, + source_map_getter: Some(Box::new(module_loader.clone())), + format_js_error_fn: Some(Arc::new(format_js_error)), + create_web_worker_cb, + web_worker_preload_module_cb, + web_worker_pre_execute_module_cb, + maybe_inspector_server, + should_break_on_first_statement: shared.options.inspect_brk, + should_wait_for_inspector_session: shared.options.inspect_wait, + module_loader, + node_fs: Some(shared.node_fs.clone()), + npm_resolver: Some(shared.npm_resolver.clone()), + get_error_class_fn: Some(&errors::get_error_class_name), + cache_storage_dir, + origin_storage_dir, + blob_store: shared.blob_store.clone(), + broadcast_channel: 
shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some( + shared.compiled_wasm_module_store.clone(), + ), + stdio, + }; + + let worker = MainWorker::bootstrap_from_options( + main_module.clone(), + permissions, + options, + ); + + Ok(CliMainWorker { + main_module, + is_main_cjs, + worker, + shared: shared.clone(), + }) + } } // TODO(bartlomieju): this callback could have default value // and not be required fn create_web_worker_preload_module_callback( - _ps: ProcState, + _shared: &Arc, ) -> Arc { Arc::new(move |worker| { let fut = async move { Ok(worker) }; @@ -379,16 +455,16 @@ fn create_web_worker_preload_module_callback( } fn create_web_worker_pre_execute_module_callback( - ps: ProcState, + shared: Arc, ) -> Arc { Arc::new(move |mut worker| { - let ps = ps.clone(); + let shared = shared.clone(); let fut = async move { // this will be up to date after pre-load - if ps.npm_resolver.has_packages() { + if shared.npm_resolver.has_packages() { deno_node::initialize_runtime( &mut worker.js_runtime, - ps.options.has_node_modules_dir(), + shared.options.has_node_modules_dir, None, )?; } @@ -400,27 +476,28 @@ fn create_web_worker_pre_execute_module_callback( } fn create_web_worker_callback( - ps: ProcState, + shared: Arc, stdio: deno_runtime::deno_io::Stdio, ) -> Arc { Arc::new(move |args| { - let maybe_inspector_server = ps.maybe_inspector_server.clone(); + let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let module_loader = CliModuleLoader::new_for_worker( - ps.clone(), - args.parent_permissions.clone(), - args.permissions.clone(), - ); + let module_loader = + Rc::new(shared.module_loader_factory.create_for_worker( + args.parent_permissions.clone(), + args.permissions.clone(), + )); let create_web_worker_cb = - create_web_worker_callback(ps.clone(), stdio.clone()); - let preload_module_cb = - create_web_worker_preload_module_callback(ps.clone()); + 
create_web_worker_callback(shared.clone(), stdio.clone()); + let preload_module_cb = create_web_worker_preload_module_callback(&shared); let pre_execute_module_cb = - create_web_worker_pre_execute_module_callback(ps.clone()); + create_web_worker_pre_execute_module_callback(shared.clone()); - let extensions = ops::cli_exts(ps.npm_resolver.clone()); + let extensions = ops::cli_exts(shared.npm_resolver.clone()); - let maybe_storage_key = ps.options.resolve_storage_key(&args.main_module); + let maybe_storage_key = shared + .storage_key_resolver + .resolve_storage_key(&args.main_module); let cache_storage_dir = maybe_storage_key.map(|key| { // TODO(@satyarohith): storage quota management // Note: we currently use temp_dir() to avoid managing storage size. @@ -431,49 +508,47 @@ fn create_web_worker_callback( let options = WebWorkerOptions { bootstrap: BootstrapOptions { - args: ps.options.argv().clone(), + args: shared.options.argv.clone(), cpu_count: std::thread::available_parallelism() .map(|p| p.get()) .unwrap_or(1), - debug_flag: ps - .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - enable_testing_features: ps.options.enable_testing_features(), + debug_flag: shared.options.debug, + enable_testing_features: shared.options.enable_testing_features, locale: deno_core::v8::icu::get_language_tag(), location: Some(args.main_module.clone()), no_color: !colors::use_color(), is_tty: colors::is_tty(), runtime_version: version::deno().to_string(), ts_version: version::TYPESCRIPT.to_string(), - unstable: ps.options.unstable(), + unstable: shared.options.unstable, user_agent: version::get_user_agent().to_string(), - inspect: ps.options.is_inspecting(), + inspect: shared.options.is_inspecting, }, extensions, startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: ps + unsafely_ignore_certificate_errors: shared .options - .unsafely_ignore_certificate_errors() + .unsafely_ignore_certificate_errors .clone(), - 
root_cert_store: Some(ps.root_cert_store.clone()), - seed: ps.options.seed(), + root_cert_store: Some(shared.root_cert_store.clone()), + seed: shared.options.seed, create_web_worker_cb, preload_module_cb, pre_execute_module_cb, format_js_error_fn: Some(Arc::new(format_js_error)), source_map_getter: Some(Box::new(module_loader.clone())), module_loader, - node_fs: Some(ps.node_fs.clone()), - npm_resolver: Some(ps.npm_resolver.clone()), + node_fs: Some(shared.node_fs.clone()), + npm_resolver: Some(shared.npm_resolver.clone()), worker_type: args.worker_type, maybe_inspector_server, get_error_class_fn: Some(&errors::get_error_class_name), - blob_store: ps.blob_store.clone(), - broadcast_channel: ps.broadcast_channel.clone(), - shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()), + blob_store: shared.blob_store.clone(), + broadcast_channel: shared.broadcast_channel.clone(), + shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), + compiled_wasm_module_store: Some( + shared.compiled_wasm_module_store.clone(), + ), stdio: stdio.clone(), cache_storage_dir, }; diff --git a/ext/node/lib.rs b/ext/node/lib.rs index cc4afb2b80..53b4f5c08d 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -534,10 +534,10 @@ deno_core::extension!(deno_node, pub fn initialize_runtime( js_runtime: &mut JsRuntime, uses_local_node_modules_dir: bool, - maybe_binary_command_name: Option, + maybe_binary_command_name: Option<&str>, ) -> Result<(), AnyError> { let argv0 = if let Some(binary_command_name) = maybe_binary_command_name { - serde_json::to_string(binary_command_name.as_str())? + serde_json::to_string(binary_command_name)? 
} else { "undefined".to_string() }; From b0264bea7de1901c1b3ed764454290d10613d14b Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Thu, 27 Apr 2023 19:40:59 +0530 Subject: [PATCH 067/320] fix(ext/node): prime generation (#18861) Towards https://github.com/denoland/deno/issues/18455 `safe`, `add` and `rem` options are not implemented because there is no rust crate that provides this functionality (except rust-openssl maybe) and its just not clear if this API is used widely. --- .../test/parallel/test-crypto-prime.js | 210 +++++++++--------- ext/node/lib.rs | 2 + ext/node/ops/crypto/mod.rs | 17 ++ ext/node/ops/crypto/primes.rs | 2 +- ext/node/polyfills/internal/crypto/random.ts | 179 +++++++++++---- 5 files changed, 256 insertions(+), 154 deletions(-) diff --git a/cli/tests/node_compat/test/parallel/test-crypto-prime.js b/cli/tests/node_compat/test/parallel/test-crypto-prime.js index de1e88fd76..fc2218c2ab 100644 --- a/cli/tests/node_compat/test/parallel/test-crypto-prime.js +++ b/cli/tests/node_compat/test/parallel/test-crypto-prime.js @@ -36,129 +36,129 @@ assert( checks: 10 })); -// (async function() { -// const prime = await pgeneratePrime(36); -// assert(await pCheckPrime(prime)); -// })().then(common.mustCall()); +(async function() { + const prime = await pgeneratePrime(36); + assert(await pCheckPrime(prime)); +})().then(common.mustCall()); -// assert.throws(() => { -// generatePrimeSync(32, { bigint: '' }); -// }, { code: 'ERR_INVALID_ARG_TYPE' }); +assert.throws(() => { + generatePrimeSync(32, { bigint: '' }); +}, { code: 'ERR_INVALID_ARG_TYPE' }); -// assert.throws(() => { -// generatePrime(32, { bigint: '' }, common.mustNotCall()); -// }, { code: 'ERR_INVALID_ARG_TYPE' }); +assert.throws(() => { + generatePrime(32, { bigint: '' }, common.mustNotCall()); +}, { code: 'ERR_INVALID_ARG_TYPE' }); -// { -// const prime = generatePrimeSync(3, { bigint: true }); -// assert.strictEqual(typeof prime, 'bigint'); -// assert.strictEqual(prime, 7n); -// 
assert(checkPrimeSync(prime)); -// checkPrime(prime, common.mustSucceed(assert)); -// } +{ + const prime = generatePrimeSync(3, { bigint: true }); + assert.strictEqual(typeof prime, 'bigint'); + assert.strictEqual(prime, 7n); + assert(checkPrimeSync(prime)); + checkPrime(prime, common.mustSucceed(assert)); +} -// { -// generatePrime(3, { bigint: true }, common.mustSucceed((prime) => { -// assert.strictEqual(typeof prime, 'bigint'); -// assert.strictEqual(prime, 7n); -// assert(checkPrimeSync(prime)); -// checkPrime(prime, common.mustSucceed(assert)); -// })); -// } +{ + generatePrime(3, { bigint: true }, common.mustSucceed((prime) => { + assert.strictEqual(typeof prime, 'bigint'); + assert.strictEqual(prime, 7n); + assert(checkPrimeSync(prime)); + checkPrime(prime, common.mustSucceed(assert)); + })); +} -// ['hello', false, {}, []].forEach((i) => { -// assert.throws(() => generatePrime(i), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrimeSync(i), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// }); +['hello', false, {}, []].forEach((i) => { + assert.throws(() => generatePrime(i), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrimeSync(i), { + code: 'ERR_INVALID_ARG_TYPE' + }); +}); -// ['hello', false, 123].forEach((i) => { -// assert.throws(() => generatePrime(80, i, common.mustNotCall()), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrimeSync(80, i), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// }); +['hello', false, 123].forEach((i) => { + assert.throws(() => generatePrime(80, i, common.mustNotCall()), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrimeSync(80, i), { + code: 'ERR_INVALID_ARG_TYPE' + }); +}); -// ['hello', false, 123].forEach((i) => { -// assert.throws(() => generatePrime(80, {}), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// }); +['hello', false, 123].forEach((i) => { + assert.throws(() => generatePrime(80, {}), { + code: 
'ERR_INVALID_ARG_TYPE' + }); +}); -// [-1, 0, 2 ** 31, 2 ** 31 + 1, 2 ** 32 - 1, 2 ** 32].forEach((size) => { -// assert.throws(() => generatePrime(size, common.mustNotCall()), { -// code: 'ERR_OUT_OF_RANGE', -// message: />= 1 && <= 2147483647/ -// }); -// assert.throws(() => generatePrimeSync(size), { -// code: 'ERR_OUT_OF_RANGE', -// message: />= 1 && <= 2147483647/ -// }); -// }); +[-1, 0, 2 ** 31, 2 ** 31 + 1, 2 ** 32 - 1, 2 ** 32].forEach((size) => { + assert.throws(() => generatePrime(size, common.mustNotCall()), { + code: 'ERR_OUT_OF_RANGE', + message: />= 1 && <= 2147483647/ + }); + assert.throws(() => generatePrimeSync(size), { + code: 'ERR_OUT_OF_RANGE', + message: />= 1 && <= 2147483647/ + }); +}); -// ['test', -1, {}, []].forEach((i) => { -// assert.throws(() => generatePrime(8, { safe: i }, common.mustNotCall()), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrime(8, { rem: i }, common.mustNotCall()), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrime(8, { add: i }, common.mustNotCall()), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrimeSync(8, { safe: i }), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrimeSync(8, { rem: i }), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// assert.throws(() => generatePrimeSync(8, { add: i }), { -// code: 'ERR_INVALID_ARG_TYPE' -// }); -// }); +['test', -1, {}, []].forEach((i) => { + assert.throws(() => generatePrime(8, { safe: i }, common.mustNotCall()), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrime(8, { rem: i }, common.mustNotCall()), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrime(8, { add: i }, common.mustNotCall()), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrimeSync(8, { safe: i }), { + code: 'ERR_INVALID_ARG_TYPE' + }); + assert.throws(() => generatePrimeSync(8, { rem: i }), { + code: 'ERR_INVALID_ARG_TYPE' + 
}); + assert.throws(() => generatePrimeSync(8, { add: i }), { + code: 'ERR_INVALID_ARG_TYPE' + }); +}); -// { -// // Negative BigInts should not be converted to 0 silently. +{ + // Negative BigInts should not be converted to 0 silently. -// assert.throws(() => generatePrime(20, { add: -1n }, common.mustNotCall()), { -// code: 'ERR_OUT_OF_RANGE', -// message: 'The value of "options.add" is out of range. It must be >= 0. ' + -// 'Received -1n' -// }); + assert.throws(() => generatePrime(20, { add: -1n }, common.mustNotCall()), { + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "options.add" is out of range. It must be >= 0. ' + + 'Received -1n' + }); -// assert.throws(() => generatePrime(20, { rem: -1n }, common.mustNotCall()), { -// code: 'ERR_OUT_OF_RANGE', -// message: 'The value of "options.rem" is out of range. It must be >= 0. ' + -// 'Received -1n' -// }); + assert.throws(() => generatePrime(20, { rem: -1n }, common.mustNotCall()), { + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "options.rem" is out of range. It must be >= 0. ' + + 'Received -1n' + }); -// assert.throws(() => checkPrime(-1n, common.mustNotCall()), { -// code: 'ERR_OUT_OF_RANGE', -// message: 'The value of "candidate" is out of range. It must be >= 0. ' + -// 'Received -1n' -// }); -// } + // assert.throws(() => checkPrime(-1n, common.mustNotCall()), { + // code: 'ERR_OUT_OF_RANGE', + // message: 'The value of "candidate" is out of range. It must be >= 0. 
' + + // 'Received -1n' + // }); +} -// generatePrime(80, common.mustSucceed((prime) => { -// assert(checkPrimeSync(prime)); -// checkPrime(prime, common.mustSucceed((result) => { -// assert(result); -// })); -// })); +generatePrime(80, common.mustSucceed((prime) => { + assert(checkPrimeSync(prime)); + checkPrime(prime, common.mustSucceed((result) => { + assert(result); + })); +})); -// assert(checkPrimeSync(generatePrimeSync(80))); +assert(checkPrimeSync(generatePrimeSync(80))); -// generatePrime(80, {}, common.mustSucceed((prime) => { -// assert(checkPrimeSync(prime)); -// })); +generatePrime(80, {}, common.mustSucceed((prime) => { + assert(checkPrimeSync(prime)); +})); -// assert(checkPrimeSync(generatePrimeSync(80, {}))); +assert(checkPrimeSync(generatePrimeSync(80, {}))); // generatePrime(32, { safe: true }, common.mustSucceed((prime) => { // assert(checkPrimeSync(prime)); diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 53b4f5c08d..cef92328d9 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -213,6 +213,8 @@ deno_core::extension!(deno_node, ops::crypto::op_node_check_prime_async, ops::crypto::op_node_check_prime_bytes, ops::crypto::op_node_check_prime_bytes_async, + ops::crypto::op_node_gen_prime, + ops::crypto::op_node_gen_prime_async, ops::crypto::op_node_pbkdf2, ops::crypto::op_node_pbkdf2_async, ops::crypto::op_node_hkdf, diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index d224b40f72..92e3029e0d 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -901,3 +901,20 @@ pub async fn op_node_scrypt_async( }) .await? } + +#[inline] +fn gen_prime(size: usize) -> ZeroCopyBuf { + primes::Prime::generate(size).0.to_bytes_be().into() +} + +#[op] +pub fn op_node_gen_prime(size: usize) -> ZeroCopyBuf { + gen_prime(size) +} + +#[op] +pub async fn op_node_gen_prime_async( + size: usize, +) -> Result { + Ok(tokio::task::spawn_blocking(move || gen_prime(size)).await?) 
+} diff --git a/ext/node/ops/crypto/primes.rs b/ext/node/ops/crypto/primes.rs index d03398f024..15aa643adb 100644 --- a/ext/node/ops/crypto/primes.rs +++ b/ext/node/ops/crypto/primes.rs @@ -8,7 +8,7 @@ use num_traits::Zero; use rand::Rng; use std::ops::Deref; -pub struct Prime(num_bigint_dig::BigUint); +pub struct Prime(pub num_bigint_dig::BigUint); impl Prime { pub fn generate(n: usize) -> Self { diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts index 04678b6be1..32256b13bf 100644 --- a/ext/node/polyfills/internal/crypto/random.ts +++ b/ext/node/polyfills/internal/crypto/random.ts @@ -8,6 +8,7 @@ import randomFill, { } from "ext:deno_node/internal/crypto/_randomFill.ts"; import randomInt from "ext:deno_node/internal/crypto/_randomInt.ts"; import { + validateBoolean, validateFunction, validateInt32, validateObject, @@ -16,7 +17,10 @@ import { isAnyArrayBuffer, isArrayBufferView, } from "ext:deno_node/internal/util/types.ts"; -import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import { + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, +} from "ext:deno_node/internal/errors.ts"; export { default as randomBytes } from "ext:deno_node/internal/crypto/_randomBytes.ts"; export { @@ -142,62 +146,141 @@ export interface GeneratePrimeOptions { bigint?: boolean | undefined; } -export interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { - bigint: true; -} - -export interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { - bigint?: false | undefined; -} - export function generatePrime( size: number, - callback: (err: Error | null, prime: ArrayBuffer) => void, -): void; -export function generatePrime( - size: number, - options: GeneratePrimeOptionsBigInt, - callback: (err: Error | null, prime: bigint) => void, -): void; -export function generatePrime( - size: number, - options: GeneratePrimeOptionsArrayBuffer, - callback: (err: Error | null, prime: ArrayBuffer) => void, -): 
void; -export function generatePrime( - size: number, - options: GeneratePrimeOptions, - callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, -): void; -export function generatePrime( - _size: number, - _options?: unknown, - _callback?: unknown, + options: GeneratePrimeOptions = {}, + callback?: (err: Error | null, prime: ArrayBuffer | bigint) => void, ) { - notImplemented("crypto.generatePrime"); + validateInt32(size, "size", 1); + if (typeof options === "function") { + callback = options; + options = {}; + } + validateFunction(callback, "callback"); + const { + bigint, + } = validateRandomPrimeJob(size, options); + core.opAsync2("op_node_gen_prime_async", size).then((prime: Uint8Array) => + bigint ? arrayBufferToUnsignedBigInt(prime.buffer) : prime.buffer + ).then((prime: ArrayBuffer | bigint) => { + callback?.(null, prime); + }); } -export function generatePrimeSync(size: number): ArrayBuffer; export function generatePrimeSync( size: number, - options: GeneratePrimeOptionsBigInt, -): bigint; -export function generatePrimeSync( - size: number, - options: GeneratePrimeOptionsArrayBuffer, -): ArrayBuffer; -export function generatePrimeSync( + options: GeneratePrimeOptions = {}, +): ArrayBuffer | bigint { + const { + bigint, + } = validateRandomPrimeJob(size, options); + + const prime = ops.op_node_gen_prime(size); + if (bigint) return arrayBufferToUnsignedBigInt(prime.buffer); + return prime.buffer; +} + +function validateRandomPrimeJob( size: number, options: GeneratePrimeOptions, -): ArrayBuffer | bigint; -export function generatePrimeSync( - _size: number, - _options?: - | GeneratePrimeOptionsBigInt - | GeneratePrimeOptionsArrayBuffer - | GeneratePrimeOptions, -): ArrayBuffer | bigint { - notImplemented("crypto.generatePrimeSync"); +): GeneratePrimeOptions { + validateInt32(size, "size", 1); + validateObject(options, "options"); + + let { + safe = false, + bigint = false, + add, + rem, + } = options!; + + validateBoolean(safe, "options.safe"); + 
validateBoolean(bigint, "options.bigint"); + + if (add !== undefined) { + if (typeof add === "bigint") { + add = unsignedBigIntToBuffer(add, "options.add"); + } else if (!isAnyArrayBuffer(add) && !isArrayBufferView(add)) { + throw new ERR_INVALID_ARG_TYPE( + "options.add", + [ + "ArrayBuffer", + "TypedArray", + "Buffer", + "DataView", + "bigint", + ], + add, + ); + } + } + + if (rem !== undefined) { + if (typeof rem === "bigint") { + rem = unsignedBigIntToBuffer(rem, "options.rem"); + } else if (!isAnyArrayBuffer(rem) && !isArrayBufferView(rem)) { + throw new ERR_INVALID_ARG_TYPE( + "options.rem", + [ + "ArrayBuffer", + "TypedArray", + "Buffer", + "DataView", + "bigint", + ], + rem, + ); + } + } + + // TODO(@littledivy): safe, add and rem options are not implemented. + if (safe || add || rem) { + notImplemented("safe, add and rem options are not implemented."); + } + + return { + safe, + bigint, + add, + rem, + }; +} + +/** + * 48 is the ASCII code for '0', 97 is the ASCII code for 'a'. + * @param {number} number An integer between 0 and 15. + * @returns {number} corresponding to the ASCII code of the hex representation + * of the parameter. + */ +const numberToHexCharCode = (number: number): number => + (number < 10 ? 48 : 87) + number; + +/** + * @param {ArrayBuffer} buf An ArrayBuffer. 
+ * @return {bigint} + */ +function arrayBufferToUnsignedBigInt(buf: ArrayBuffer): bigint { + const length = buf.byteLength; + const chars: number[] = Array(length * 2); + const view = new DataView(buf); + + for (let i = 0; i < length; i++) { + const val = view.getUint8(i); + chars[2 * i] = numberToHexCharCode(val >> 4); + chars[2 * i + 1] = numberToHexCharCode(val & 0xf); + } + + return BigInt(`0x${String.fromCharCode(...chars)}`); +} + +function unsignedBigIntToBuffer(bigint: bigint, name: string) { + if (bigint < 0) { + throw new ERR_OUT_OF_RANGE(name, ">= 0", bigint); + } + + const hex = bigint.toString(16); + const padded = hex.padStart(hex.length + (hex.length % 2), 0); + return Buffer.from(padded, "hex"); } export const randomUUID = () => globalThis.crypto.randomUUID(); From 3fbb31c3c1f85011db9cc616dab0ef113342d7dd Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Thu, 27 Apr 2023 16:59:02 +0200 Subject: [PATCH 068/320] feat(kv): return ok bool from atomic commit (#18873) --- cli/tests/unit/kv_test.ts | 39 ++++++++++++++-------------- cli/tsc/dts/lib.deno.unstable.d.ts | 41 ++++++++++++++++++++---------- ext/kv/01_db.ts | 8 +++--- 3 files changed, 52 insertions(+), 36 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 0af8f338d2..0dc1690aad 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -66,6 +66,7 @@ dbTest("basic read-write-delete and versionstamps", async (db) => { assertEquals(result1.versionstamp, null); const setRes = await db.set(["a"], "b"); + assert(setRes.ok); assertEquals(setRes.versionstamp, "00000000000000010000"); const result2 = await db.get(["a"]); assertEquals(result2.key, ["a"]); @@ -183,7 +184,7 @@ dbTest("compare and mutate", async (db) => { .check({ key: ["t"], versionstamp: currentValue.versionstamp }) .set(currentValue.key, "2") .commit(); - assert(res); + assert(res.ok); assertEquals(res.versionstamp, "00000000000000020000"); const newValue = await db.get(["t"]); @@ -194,7 
+195,7 @@ dbTest("compare and mutate", async (db) => { .check({ key: ["t"], versionstamp: currentValue.versionstamp }) .set(currentValue.key, "3") .commit(); - assertEquals(res, null); + assert(!res.ok); const newValue2 = await db.get(["t"]); assertEquals(newValue2.versionstamp, "00000000000000020000"); @@ -206,7 +207,7 @@ dbTest("compare and mutate not exists", async (db) => { .check({ key: ["t"], versionstamp: null }) .set(["t"], "1") .commit(); - assert(res); + assert(res.ok); const newValue = await db.get(["t"]); assertEquals(newValue.versionstamp, "00000000000000010000"); @@ -216,7 +217,7 @@ dbTest("compare and mutate not exists", async (db) => { .check({ key: ["t"], versionstamp: null }) .set(["t"], "2") .commit(); - assertEquals(res, null); + assert(!res.ok); }); dbTest("atomic mutation helper (sum)", async (db) => { @@ -264,7 +265,7 @@ dbTest("compare multiple and mutate", async (db) => { .set(currentValue1.key, "3") .set(currentValue2.key, "4") .commit(); - assert(res); + assert(res.ok); const newValue1 = await db.get(["t1"]); assertEquals(newValue1.versionstamp, "00000000000000030000"); @@ -280,7 +281,7 @@ dbTest("compare multiple and mutate", async (db) => { .set(newValue1.key, "5") .set(newValue2.key, "6") .commit(); - assertEquals(res2, null); + assert(!res2.ok); const newValue3 = await db.get(["t1"]); assertEquals(newValue3.versionstamp, "00000000000000030000"); @@ -296,7 +297,7 @@ dbTest("atomic mutation ordering (set before delete)", async (db) => { .set(["a"], "2") .delete(["a"]) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -307,7 +308,7 @@ dbTest("atomic mutation ordering (delete before set)", async (db) => { .delete(["a"]) .set(["a"], "2") .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "2"); }); @@ -316,7 +317,7 @@ dbTest("atomic mutation type=set", async (db) => { const res = await db.atomic() .mutate({ key: 
["a"], value: "1", type: "set" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "1"); }); @@ -326,7 +327,7 @@ dbTest("atomic mutation type=set overwrite", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: "2", type: "set" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, "2"); }); @@ -336,7 +337,7 @@ dbTest("atomic mutation type=delete", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], type: "delete" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -345,7 +346,7 @@ dbTest("atomic mutation type=delete no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], type: "delete" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, null); }); @@ -355,7 +356,7 @@ dbTest("atomic mutation type=sum", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(11n)); }); @@ -364,7 +365,7 @@ dbTest("atomic mutation type=sum no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); @@ -375,7 +376,7 @@ dbTest("atomic mutation type=sum wrap around", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(10n), type: "sum" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(9n)); @@ -423,7 +424,7 @@ dbTest("atomic mutation type=min", async (db) => { const res = await db.atomic() 
.mutate({ key: ["a"], value: new Deno.KvU64(5n), type: "min" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(5n)); @@ -439,7 +440,7 @@ dbTest("atomic mutation type=min no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "min" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); @@ -477,7 +478,7 @@ dbTest("atomic mutation type=max", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(5n), type: "max" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assertEquals(result.value, new Deno.KvU64(10n)); @@ -493,7 +494,7 @@ dbTest("atomic mutation type=max no exists", async (db) => { const res = await db.atomic() .mutate({ key: ["a"], value: new Deno.KvU64(1n), type: "max" }) .commit(); - assert(res); + assert(res.ok); const result = await db.get(["a"]); assert(result.value); assertEquals(result.value, new Deno.KvU64(1n)); diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index f169e0254b..8613da2ab5 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -1545,6 +1545,10 @@ declare namespace Deno { * relative significance of the types can be found in documentation for the * {@linkcode Deno.KvKeyPart} type. * + * Keys have a maximum size of 2048 bytes serialized. If the size of the key + * exceeds this limit, an error will be thrown on the operation that this key + * was passed to. + * * @category KV */ export type KvKey = readonly KvKeyPart[]; @@ -1758,10 +1762,16 @@ declare namespace Deno { /** @category KV */ export interface KvCommitResult { + ok: true; /** The versionstamp of the value committed to KV. 
*/ versionstamp: string; } + /** @category KV */ + export interface KvCommitError { + ok: false; + } + /** **UNSTABLE**: New API, yet to be vetted. * * A check to perform as part of a {@linkcode Deno.AtomicOperation}. The check @@ -1803,11 +1813,13 @@ declare namespace Deno { * * The `commit` method of an atomic operation returns a value indicating * whether checks passed and mutations were performed. If the operation failed - * because of a failed check, the return value will be `null`. If the + * because of a failed check, the return value will be a + * {@linkcode Deno.KvCommitError} with an `ok: false` property. If the * operation failed for any other reason (storage error, invalid value, etc.), * an exception will be thrown. If the operation succeeded, the return value - * will be a {@linkcode Deno.KvCommitResult} object containing the - * versionstamp of the value committed to KV. + * will be a {@linkcode Deno.KvCommitResult} object with a `ok: true` property + * and the versionstamp of the value committed to KV. + * * @category KV */ @@ -1857,17 +1869,19 @@ declare namespace Deno { /** * Commit the operation to the KV store. Returns a value indicating whether * checks passed and mutations were performed. If the operation failed - * because of a failed check, the return value will be `null`. If the - * operation failed for any other reason (storage error, invalid value, - * etc.), an exception will be thrown. If the operation succeeded, the - * return value will be a {@linkcode Deno.KvCommitResult} object containing - * the versionstamp of the value committed to KV. + * because of a failed check, the return value will be a {@linkcode + * Deno.KvCommitError} with an `ok: false` property. If the operation failed + * for any other reason (storage error, invalid value, etc.), an exception + * will be thrown. 
If the operation succeeded, the return value will be a + * {@linkcode Deno.KvCommitResult} object with a `ok: true` property and the + * versionstamp of the value committed to KV. * - * If the commit returns `null`, one may create a new atomic operation with - * updated checks and mutations and attempt to commit it again. See the note - * on optimistic locking in the documentation for {@linkcode Deno.AtomicOperation}. + * If the commit returns `ok: false`, one may create a new atomic operation + * with updated checks and mutations and attempt to commit it again. See the + * note on optimistic locking in the documentation for + * {@linkcode Deno.AtomicOperation}. */ - commit(): Promise; + commit(): Promise; } /** **UNSTABLE**: New API, yet to be vetted. @@ -1901,7 +1915,8 @@ declare namespace Deno { * maximum length of 64 KiB after serialization. Serialization of both keys * and values is somewhat opaque, but one can usually assume that the * serialization of any value is about the same length as the resulting string - * of a JSON serialization of that same value. + * of a JSON serialization of that same value. If theses limits are exceeded, + * an exception will be thrown. 
* * @category KV */ diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index da29a09521..0dd6ba83a2 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -116,7 +116,7 @@ class Kv { [], ); if (versionstamp === null) throw new TypeError("Failed to set value"); - return { versionstamp }; + return { ok: true, versionstamp }; } async delete(key: Deno.KvKey) { @@ -266,7 +266,7 @@ class AtomicOperation { return this; } - async commit(): Promise { + async commit(): Promise { const versionstamp = await core.opAsync( "op_kv_atomic_write", this.#rid, @@ -274,8 +274,8 @@ class AtomicOperation { this.#mutations, [], // TODO(@losfair): enqueue ); - if (versionstamp === null) return null; - return { versionstamp }; + if (versionstamp === null) return { ok: false }; + return { ok: true, versionstamp }; } then() { From c3d670dbc992ffaff02cd8df82335ee41e88596e Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Thu, 27 Apr 2023 18:31:35 +0200 Subject: [PATCH 069/320] feat(node/crypto): Elliptic Curve Diffie-Hellman (ECDH) support (#18832) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - ECDH class - crypto.createECDH() - Supported curves: - secp256k1 - prime256v1 / secp256r1 - secp384r1 - secp224r1 Co-authored-by: Bartek Iwańczuk --- Cargo.lock | 205 ++++++++++++++++-- Cargo.toml | 4 + ext/node/Cargo.toml | 5 + ext/node/lib.rs | 4 +- ext/node/ops/crypto/mod.rs | 165 ++++++++++++++ .../internal/crypto/diffiehellman.ts | 75 ++++++- ext/node/polyfills/internal/crypto/util.ts | 44 +++- 7 files changed, 469 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 352ca75106..ddd92ea833 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -243,6 +243,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base32" version = "0.4.0" @@ -595,6 +601,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "crypto-bigint" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" +dependencies = [ + "generic-array 0.14.6", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -877,15 +895,15 @@ dependencies = [ "curve25519-dalek 2.1.3", "deno_core", "deno_web", - "elliptic-curve", + "elliptic-curve 0.12.3", "num-traits", "once_cell", - "p256", - "p384", + "p256 0.11.1", + "p384 0.11.2", "rand", "ring", "rsa", - "sec1", + "sec1 0.3.0", "serde", "serde_bytes", "sha1", @@ -1137,6 +1155,7 @@ dependencies = [ "digest 0.10.6", "dsa", "ecb", + "elliptic-curve 0.13.4", "hex", "hkdf", "idna 0.3.0", @@ -1150,6 +1169,9 @@ dependencies = [ "num-integer", "num-traits", "once_cell", + "p224", + "p256 0.13.2", + "p384 0.13.0", "path-clean", "pbkdf2", "rand", @@ -1158,6 +1180,7 @@ dependencies = [ "ripemd", "rsa", "scrypt", + "secp256k1", "serde", "sha-1", "sha2", @@ -1371,7 +1394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" dependencies = [ "const-oid", - "pem-rfc7468", + "pem-rfc7468 0.6.0", "zeroize", ] @@ -1382,6 +1405,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82b10af9f9f9f2134a42d3f8aa74658660f2e0234b0eb81bd171df8aa32779ed" dependencies = [ "const-oid", + "pem-rfc7468 0.7.0", "zeroize", ] @@ -1628,11 +1652,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" dependencies = [ "der 0.6.1", - "elliptic-curve", + "elliptic-curve 0.12.3", "rfc6979 0.3.1", "signature 1.6.4", ] +[[package]] +name = "ecdsa" 
+version = "0.16.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a48e5d537b8a30c0b023116d981b16334be1485af7ca68db3a2b7024cbc957fd" +dependencies = [ + "der 0.7.3", + "digest 0.10.6", + "elliptic-curve 0.13.4", + "rfc6979 0.4.0", + "signature 2.1.0", +] + [[package]] name = "either" version = "1.8.1" @@ -1645,18 +1682,39 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" dependencies = [ - "base16ct", - "crypto-bigint", + "base16ct 0.1.1", + "crypto-bigint 0.4.9", "der 0.6.1", "digest 0.10.6", - "ff", + "ff 0.12.1", "generic-array 0.14.6", - "group", + "group 0.12.1", "hkdf", - "pem-rfc7468", + "pem-rfc7468 0.6.0", "pkcs8 0.9.0", "rand_core 0.6.4", - "sec1", + "sec1 0.3.0", + "subtle", + "zeroize", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c71eaa367f2e5d556414a8eea812bc62985c879748d6403edabd9cb03f16e7" +dependencies = [ + "base16ct 0.2.0", + "crypto-bigint 0.5.2", + "digest 0.10.6", + "ff 0.13.0", + "generic-array 0.14.6", + "group 0.13.0", + "hkdf", + "pem-rfc7468 0.7.0", + "pkcs8 0.10.2", + "rand_core 0.6.4", + "sec1 0.7.1", "subtle", "zeroize", ] @@ -1828,6 +1886,16 @@ dependencies = [ "subtle", ] +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "filetime" version = "0.2.20" @@ -2051,6 +2119,7 @@ checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -2106,7 +2175,18 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ - "ff", + "ff 0.12.1", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff 0.13.0", "rand_core 0.6.4", "subtle", ] @@ -3123,14 +3203,38 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" +[[package]] +name = "p224" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30c06436d66652bc2f01ade021592c80a2aad401570a18aa18b82e440d2b9aa1" +dependencies = [ + "ecdsa 0.16.6", + "elliptic-curve 0.13.4", + "primeorder", + "sha2", +] + [[package]] name = "p256" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" dependencies = [ - "ecdsa", - "elliptic-curve", + "ecdsa 0.14.8", + "elliptic-curve 0.12.3", + "sha2", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa 0.16.6", + "elliptic-curve 0.13.4", + "primeorder", "sha2", ] @@ -3140,8 +3244,20 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfc8c5bf642dde52bb9e87c0ecd8ca5a76faac2eeed98dedb7c717997e1080aa" dependencies = [ - "ecdsa", - "elliptic-curve", + "ecdsa 0.14.8", + "elliptic-curve 0.12.3", + "sha2", +] + +[[package]] +name = "p384" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209" +dependencies = [ + "ecdsa 0.16.6", + "elliptic-curve 
0.13.4", + "primeorder", "sha2", ] @@ -3244,6 +3360,15 @@ dependencies = [ "base64ct", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.2.0" @@ -3431,6 +3556,15 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "primeorder" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf8d3875361e28f7753baefef104386e7aa47642c93023356d97fdef4003bfb5" +dependencies = [ + "elliptic-curve 0.13.4", +] + [[package]] name = "proc-macro-crate" version = "1.3.1" @@ -3689,7 +3823,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" dependencies = [ - "crypto-bigint", + "crypto-bigint 0.4.9", "hmac", "zeroize", ] @@ -3955,7 +4089,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" dependencies = [ - "base16ct", + "base16ct 0.1.1", "der 0.6.1", "generic-array 0.14.6", "pkcs8 0.9.0", @@ -3963,6 +4097,39 @@ dependencies = [ "zeroize", ] +[[package]] +name = "sec1" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" +dependencies = [ + "base16ct 0.2.0", + "der 0.7.3", + "generic-array 0.14.6", + "pkcs8 0.10.2", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" +dependencies = [ + "rand", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a129b9e9efbfb223753b9163c4ab3b13cff7fd9c7f010fbac25ab4099fa07e" +dependencies = [ + "cc", +] + [[package]] name = "security-framework" version = "2.8.2" diff --git a/Cargo.toml b/Cargo.toml index 3b0a0abf2a..5664a69ed2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -139,6 +139,10 @@ tower-lsp = { version = "=0.17.0", features = ["proposed"] } url = { version = "2.3.1", features = ["serde", "expose_internals"] } uuid = { version = "1.3.0", features = ["v4"] } zstd = "=0.11.2" +elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] } +p224 = { version = "0.13.0", features = ["ecdh"] } +p256 = { version = "0.13.2", features = ["ecdh"] } +p384 = { version = "0.13.0", features = ["ecdh"] } # crypto rsa = { version = "0.7.0", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 576e62d559..14928db307 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -24,6 +24,7 @@ deno_semver.workspace = true digest = { version = "0.10.5", features = ["core-api", "std"] } dsa = "0.6.1" ecb.workspace = true +elliptic-curve.workspace = true hex.workspace = true hkdf.workspace = true idna = "0.3.0" @@ -37,6 +38,9 @@ num-bigint-dig = "0.8.2" num-integer = "0.1.45" num-traits = "0.2.14" once_cell.workspace = true +p224.workspace = true +p256.workspace = true +p384.workspace = true path-clean = "=0.1.0" pbkdf2 = "0.12.1" rand.workspace = true @@ -45,6 +49,7 @@ ring.workspace = true ripemd = "0.1.3" rsa.workspace = true scrypt = "0.11.0" +secp256k1 = { version = "0.27.0", features = ["rand-std"] } serde = "1.0.149" sha-1 = "0.10.0" sha2.workspace = true diff --git a/ext/node/lib.rs b/ext/node/lib.rs index cef92328d9..b5db83297e 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -240,6 +240,9 @@ deno_core::extension!(deno_node, 
ops::crypto::op_node_random_int, ops::crypto::op_node_scrypt_sync, ops::crypto::op_node_scrypt_async, + ops::crypto::op_node_ecdh_generate_keys, + ops::crypto::op_node_ecdh_compute_secret, + ops::crypto::op_node_ecdh_compute_public_key, ops::crypto::x509::op_node_x509_parse, ops::crypto::x509::op_node_x509_ca, ops::crypto::x509::op_node_x509_check_email, @@ -267,7 +270,6 @@ deno_core::extension!(deno_node, ops::zlib::op_zlib_init, ops::zlib::op_zlib_reset, op_node_build_os, - ops::require::op_require_init_paths, ops::require::op_require_node_module_paths, ops::require::op_require_proxy_path, diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index 92e3029e0d..9e1a3da989 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -18,12 +18,18 @@ use rand::Rng; use std::future::Future; use std::rc::Rc; +use p224::NistP224; +use p256::NistP256; +use p384::NistP384; use rsa::padding::PaddingScheme; use rsa::pkcs8::DecodePrivateKey; use rsa::pkcs8::DecodePublicKey; use rsa::PublicKey; use rsa::RsaPrivateKey; use rsa::RsaPublicKey; +use secp256k1::ecdh::SharedSecret; +use secp256k1::Secp256k1; +use secp256k1::SecretKey; mod cipher; mod dh; @@ -902,6 +908,165 @@ pub async fn op_node_scrypt_async( .await? 
} +#[op] +pub fn op_node_ecdh_generate_keys( + curve: &str, + pubbuf: &mut [u8], + privbuf: &mut [u8], +) -> Result { + let mut rng = rand::thread_rng(); + match curve { + "secp256k1" => { + let secp = Secp256k1::new(); + let (privkey, pubkey) = secp.generate_keypair(&mut rng); + pubbuf.copy_from_slice(&pubkey.serialize_uncompressed()); + privbuf.copy_from_slice(&privkey.secret_bytes()); + + Ok(0) + } + "prime256v1" | "secp256r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + "secp384r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + "secp224r1" => { + let privkey = elliptic_curve::SecretKey::::random(&mut rng); + let pubkey = privkey.public_key(); + pubbuf.copy_from_slice(pubkey.to_sec1_bytes().as_ref()); + privbuf.copy_from_slice(privkey.to_nonzero_scalar().to_bytes().as_ref()); + Ok(0) + } + &_ => todo!(), + } +} + +#[op] +pub fn op_node_ecdh_compute_secret( + curve: &str, + this_priv: Option, + their_pub: &mut [u8], + secret: &mut [u8], +) -> Result<(), AnyError> { + match curve { + "secp256k1" => { + let this_secret_key = SecretKey::from_slice( + this_priv.expect("no private key provided?").as_ref(), + ) + .unwrap(); + let their_public_key = + secp256k1::PublicKey::from_slice(their_pub).unwrap(); + let shared_secret = + SharedSecret::new(&their_public_key, &this_secret_key); + + secret.copy_from_slice(&shared_secret.secret_bytes()); + Ok(()) + } + "prime256v1" | "secp256r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = elliptic_curve::SecretKey::::from_slice( 
+ &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + "secp384r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = elliptic_curve::SecretKey::::from_slice( + &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + "secp224r1" => { + let their_public_key = + elliptic_curve::PublicKey::::from_sec1_bytes(their_pub) + .expect("bad public key"); + let this_private_key = elliptic_curve::SecretKey::::from_slice( + &this_priv.expect("must supply private key"), + ) + .expect("bad private key"); + let shared_secret = elliptic_curve::ecdh::diffie_hellman( + this_private_key.to_nonzero_scalar(), + their_public_key.as_affine(), + ); + secret.copy_from_slice(shared_secret.raw_secret_bytes()); + + Ok(()) + } + &_ => todo!(), + } +} + +#[op] +pub fn op_node_ecdh_compute_public_key( + curve: &str, + privkey: &[u8], + pubkey: &mut [u8], +) -> Result<(), AnyError> { + match curve { + "secp256k1" => { + let secp = Secp256k1::new(); + let secret_key = SecretKey::from_slice(privkey).unwrap(); + let public_key = + secp256k1::PublicKey::from_secret_key(&secp, &secret_key); + + pubkey.copy_from_slice(&public_key.serialize_uncompressed()); + + Ok(()) + } + "prime256v1" | "secp256r1" => { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + "secp384r1" 
=> { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + "secp224r1" => { + let this_private_key = + elliptic_curve::SecretKey::::from_slice(privkey) + .expect("bad private key"); + let public_key = this_private_key.public_key(); + pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); + Ok(()) + } + &_ => todo!(), + } +} + #[inline] fn gen_prime(size: usize) -> ZeroCopyBuf { primes::Prime::generate(size).0.to_bytes_be().into() diff --git a/ext/node/polyfills/internal/crypto/diffiehellman.ts b/ext/node/polyfills/internal/crypto/diffiehellman.ts index 3aa1f80809..62a802126f 100644 --- a/ext/node/polyfills/internal/crypto/diffiehellman.ts +++ b/ext/node/polyfills/internal/crypto/diffiehellman.ts @@ -13,6 +13,8 @@ import { } from "ext:deno_node/internal/validators.mjs"; import { Buffer } from "ext:deno_node/buffer.ts"; import { + EllipticCurve, + ellipticCurves, getDefaultEncoding, toBuf, } from "ext:deno_node/internal/crypto/util.ts"; @@ -24,6 +26,8 @@ import type { import { KeyObject } from "ext:deno_node/internal/crypto/keys.ts"; import type { BufferEncoding } from "ext:deno_node/_global.d.ts"; +const { ops } = Deno.core; + const DH_GENERATOR = 2; export class DiffieHellman { @@ -219,10 +223,21 @@ export class DiffieHellmanGroup { } export class ECDH { + #curve: EllipticCurve; // the selected curve + #privbuf: Buffer; // the private key + #pubbuf: Buffer; // the public key + constructor(curve: string) { validateString(curve, "curve"); - notImplemented("crypto.ECDH"); + const c = ellipticCurves.find((x) => x.name == curve); + if (c == undefined) { + throw new Error("invalid curve"); + } + + this.#curve = c; + this.#pubbuf = Buffer.alloc(this.#curve.publicKeySize); + this.#privbuf = Buffer.alloc(this.#curve.privateKeySize); } static convertKey( @@ -250,44 +265,80 @@ export class 
ECDH { outputEncoding: BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, + otherPublicKey: ArrayBufferView | string, _inputEncoding?: BinaryToTextEncoding, _outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.computeSecret"); + const secretBuf = Buffer.alloc(this.#curve.sharedSecretSize); + + ops.op_node_ecdh_compute_secret( + this.#curve.name, + this.#privbuf, + otherPublicKey, + secretBuf, + ); + + return secretBuf; } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; generateKeys( - _encoding?: BinaryToTextEncoding, + encoding?: BinaryToTextEncoding, _format?: ECDHKeyFormat, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.generateKeys"); + ops.op_node_ecdh_generate_keys( + this.#curve.name, + this.#pubbuf, + this.#privbuf, + ); + + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.ECDH.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined) { + return this.#privbuf.toString(encoding); + } + return this.#privbuf; } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; getPublicKey( - _encoding?: BinaryToTextEncoding, + encoding?: BinaryToTextEncoding, _format?: ECDHKeyFormat, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.getPublicKey"); + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } setPrivateKey(privateKey: ArrayBufferView): void; setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; setPrivateKey( - _privateKey: ArrayBufferView | string, - _encoding?: BinaryToTextEncoding, + 
privateKey: ArrayBufferView | string, + encoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.ECDH.prototype.setPrivateKey"); + this.#privbuf = privateKey; + this.#pubbuf = Buffer.alloc(this.#curve.publicKeySize); + + ops.op_node_ecdh_compute_public_key( + this.#curve.name, + this.#privbuf, + this.#pubbuf, + ); + + if (encoding !== undefined) { + return this.#pubbuf.toString(encoding); + } + return this.#pubbuf; } } diff --git a/ext/node/polyfills/internal/crypto/util.ts b/ext/node/polyfills/internal/crypto/util.ts index ccb7726316..2e269b7fad 100644 --- a/ext/node/polyfills/internal/crypto/util.ts +++ b/ext/node/polyfills/internal/crypto/util.ts @@ -46,6 +46,47 @@ const digestAlgorithms = [ "sha1", ]; +export type EllipticCurve = { + name: string; + ephemeral: boolean; + privateKeySize: number; + publicKeySize: number; + sharedSecretSize: number; +}; + +export const ellipticCurves: Array = [ + { + name: "secp256k1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // Weierstrass-class EC used by Bitcoin + { + name: "prime256v1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // NIST P-256 EC + { + name: "secp256r1", + privateKeySize: 32, + publicKeySize: 65, + sharedSecretSize: 32, + }, // NIST P-256 EC (same as above) + { + name: "secp384r1", + privateKeySize: 48, + publicKeySize: 97, + sharedSecretSize: 48, + }, // NIST P-384 EC + { + name: "secp224r1", + privateKeySize: 28, + publicKeySize: 57, + sharedSecretSize: 28, + }, // NIST P-224 EC +]; + // deno-fmt-ignore const supportedCiphers = [ "aes-128-ecb", "aes-192-ecb", @@ -114,8 +155,9 @@ export function getHashes(): readonly string[] { return digestAlgorithms; } +const curveNames = ellipticCurves.map((x) => x.name); export function getCurves(): readonly string[] { - notImplemented("crypto.getCurves"); + return curveNames; } export interface SecureHeapUsage { From 6cd62ea5e969de258b1d308daf5bec91e73e79d3 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 27 Apr 2023 20:50:46 +0200 Subject: [PATCH 070/320] chore: upgrade rusty_v8 to 0.71.0 (#18868) --- .github/workflows/ci.generate.ts | 4 +- .github/workflows/ci.yml | 6 +- Cargo.lock | 4 +- Cargo.toml | 2 +- ...event_listener_error_immediate_exit.ts.out | 4 + ...istener_error_immediate_exit_worker.ts.out | 4 + core/runtime.rs | 25 +--- serde_v8/error.rs | 3 + serde_v8/magic/v8slice.rs | 13 +- serde_v8/tests/de.rs | 10 ++ tools/wpt/expectation.json | 113 +----------------- 11 files changed, 48 insertions(+), 140 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 41abf17370..ea9f93bc1c 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + "23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "22-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "23-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c70590d61b..205e5c069f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '22-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '23-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved 
- restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -578,7 +578,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '22-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index ddd92ea833..483d57340b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5678,9 +5678,9 @@ dependencies = [ [[package]] name = "v8" -version = "0.70.0" +version = "0.71.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab13e022340b67561836bbb90ceeebbfca7e35fbc05471ceff5ce099e5a754a3" +checksum = "51a173a437bebab13d587a4aaf0a1e7a49433226538c9a78ca3b4ce3b8c6aeb6" dependencies = [ "bitflags 1.3.2", "fslock", diff --git a/Cargo.toml b/Cargo.toml index 5664a69ed2..4a7d051f09 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.70.0", default-features = false } +v8 = { version = "0.71.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } deno_core = { version = "0.181.0", path = "./core" } diff --git a/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out b/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out index 8f03f71b81..2b1b264449 100644 --- a/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out +++ b/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out @@ -3,4 +3,8 @@ error: Uncaught Error: bar throw new 
Error("bar"); ^ at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9[WILDCARD] + at innerInvokeEventListeners (ext:deno_web/02_event.js:785:7) + at invokeEventListeners (ext:deno_web/02_event.js:825:5) + at dispatch (ext:deno_web/02_event.js:694:9) + at dispatchEvent (ext:deno_web/02_event.js:1086:12) at [WILDCARD]/event_listener_error_immediate_exit.ts:11:1 diff --git a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out index 8bd3122980..8ab76d6cf2 100644 --- a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out +++ b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out @@ -3,6 +3,10 @@ error: Uncaught (in worker "") Error: bar throw new Error("bar"); ^ at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9 + at innerInvokeEventListeners (ext:deno_web/02_event.js:785:7) + at invokeEventListeners (ext:deno_web/02_event.js:825:5) + at dispatch (ext:deno_web/02_event.js:694:9) + at dispatchEvent (ext:deno_web/02_event.js:1086:12) at [WILDCARD]/event_listener_error_immediate_exit.ts:11:1 error: Uncaught (in promise) Error: Unhandled error in child worker. at [WILDCARD] diff --git a/core/runtime.rs b/core/runtime.rs index d8355ae6d6..be777bdf7e 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -196,7 +196,6 @@ fn v8_init( " --no-validate-asm", " --turbo_fast_api_calls", " --harmony-change-array-by-copy", - " --no-harmony-rab-gsab", ); if predictable { @@ -1527,6 +1526,10 @@ pub(crate) fn exception_to_err_result( let state_rc = JsRuntime::state(scope); let was_terminating_execution = scope.is_execution_terminating(); + // Disable running microtasks for a moment. When upgrading to V8 v11.4 + // we discovered that canceling termination here will cause the queued + // microtasks to run which breaks some tests. 
+ scope.set_microtasks_policy(v8::MicrotasksPolicy::Explicit); // If TerminateExecution was called, cancel isolate termination so that the // exception can be created. Note that `scope.is_execution_terminating()` may // have returned false if TerminateExecution was indeed called but there was @@ -1560,6 +1563,7 @@ pub(crate) fn exception_to_err_result( // Resume exception termination. scope.terminate_execution(); } + scope.set_microtasks_policy(v8::MicrotasksPolicy::Auto); Err(js_error.into()) } @@ -4737,25 +4741,6 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .is_ok()); } - #[test] - fn test_resizable_array_buffer() { - // Verify that "resizable ArrayBuffer" is disabled - let mut runtime = JsRuntime::new(Default::default()); - runtime - .execute_script_static( - "test_rab.js", - r#"const a = new ArrayBuffer(100, {maxByteLength: 200}); - if (a.byteLength !== 100) { - throw new Error('wrong byte length'); - } - if (a.maxByteLength !== undefined) { - throw new Error("ArrayBuffer shouldn't have maxByteLength"); - } - "#, - ) - .unwrap(); - } - #[test] fn test_non_existent_async_op_error() { // Verify that "resizable ArrayBuffer" is disabled diff --git a/serde_v8/error.rs b/serde_v8/error.rs index aa2d92bf8e..16d7882b70 100644 --- a/serde_v8/error.rs +++ b/serde_v8/error.rs @@ -55,6 +55,9 @@ pub enum Error { #[error("serde_v8 error: length mismatch, got: {0}, expected: {1}")] LengthMismatch(usize, usize), + + #[error("serde_v8 error: can't create slice from resizable ArrayBuffer")] + ResizableBackingStoreNotSupported, } impl serde::ser::Error for Error { diff --git a/serde_v8/magic/v8slice.rs b/serde_v8/magic/v8slice.rs index 073e752355..b1dd897703 100644 --- a/serde_v8/magic/v8slice.rs +++ b/serde_v8/magic/v8slice.rs @@ -91,9 +91,16 @@ impl FromV8 for V8Slice { scope: &mut v8::HandleScope, value: v8::Local, ) -> Result { - to_ranged_buffer(scope, value) - .and_then(|(b, r)| Self::from_buffer(b, r)) - .map_err(|_| 
crate::Error::ExpectedBuffer(value_to_type_str(value))) + match to_ranged_buffer(scope, value) { + Ok((b, r)) => { + if b.get_backing_store().is_resizable_by_user_javascript() { + return Err(crate::Error::ResizableBackingStoreNotSupported); + } + Self::from_buffer(b, r) + .map_err(|_| crate::Error::ExpectedBuffer(value_to_type_str(value))) + } + Err(_) => Err(crate::Error::ExpectedBuffer(value_to_type_str(value))), + } } } diff --git a/serde_v8/tests/de.rs b/serde_v8/tests/de.rs index 4e5e1e4b99..4c5cf72836 100644 --- a/serde_v8/tests/de.rs +++ b/serde_v8/tests/de.rs @@ -265,6 +265,16 @@ fn de_buffers() { assert_eq!(&*buf, &[0x68, 0x65, 0x6C, 0x6C, 0x6F]); }, ); + + dedo("(new ArrayBuffer(4))", |scope, v| { + let buf: ZeroCopyBuf = serde_v8::from_v8(scope, v).unwrap(); + assert_eq!(&*buf, &[0x0, 0x0, 0x0, 0x0]); + }); + + dedo("(new ArrayBuffer(8, { maxByteLength: 16}))", |scope, v| { + let result: Result = serde_v8::from_v8(scope, v); + matches!(result, Err(Error::ResizableBackingStoreNotSupported)); + }); } // Structs diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 8f45901836..7cff6b6eb4 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -5480,18 +5480,9 @@ "Serializing a non-serializable platform object fails", "An object whose interface is deleted from the global must still deserialize", "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" + "Transferring OOB TypedArray throws" ], 
"structured-clone.any.worker.html": [ "Blob basic", @@ -5517,18 +5508,9 @@ "Serializing a non-serializable platform object fails", "An object whose interface is deleted from the global must still deserialize", "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" + "Transferring OOB TypedArray throws" ] }, "dynamic-markup-insertion": { @@ -5659,95 +5641,8 @@ }, "infrastructure": { "safe-passing-of-structured-data": { - "messagechannel.any.html": [ - "Blob basic", - "Blob unpaired high surrogate (invalid utf-8)", - "Blob unpaired low surrogate (invalid utf-8)", - "Blob paired surrogates (invalid utf-8)", - "Blob empty", - "Blob NUL", - "Array Blob object, Blob basic", - "Array Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Array Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Array Blob object, Blob paired surrogates (invalid utf-8)", - "Array Blob object, Blob empty", - "Array Blob object, Blob NUL", - "Array Blob object, two Blobs", - "Object Blob object, Blob basic", - "Object Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Object Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Object Blob object, Blob paired surrogates (invalid utf-8)", - "Object Blob object, Blob empty", - "Object Blob object, Blob NUL", - "File basic", - "FileList empty", - "Array FileList object, FileList empty", - "Object FileList object, FileList empty", - "ImageData 1x1 transparent black", - "ImageData 1x1 non-transparent 
non-black", - "Array ImageData object, ImageData 1x1 transparent black", - "Array ImageData object, ImageData 1x1 non-transparent non-black", - "Object ImageData object, ImageData 1x1 transparent black", - "Object ImageData object, ImageData 1x1 non-transparent non-black", - "ImageBitmap 1x1 transparent black", - "ImageBitmap 1x1 non-transparent non-black", - "Array ImageBitmap object, ImageBitmap 1x1 transparent black", - "Array ImageBitmap object, ImageBitmap 1x1 transparent non-black", - "Object ImageBitmap object, ImageBitmap 1x1 transparent black", - "Object ImageBitmap object, ImageBitmap 1x1 transparent non-black", - "Serializing a non-serializable platform object fails", - "An object whose interface is deleted from the global must still deserialize", - "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", - "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", - "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" - ], - "messagechannel.any.worker.html": [ - "Blob basic", - "Blob unpaired high surrogate (invalid utf-8)", - "Blob unpaired low surrogate (invalid utf-8)", - "Blob paired surrogates (invalid utf-8)", - "Blob empty", - "Blob NUL", - "Array Blob object, Blob basic", - "Array Blob object, Blob unpaired high surrogate (invalid utf-8)", - "Array Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Array Blob object, Blob paired surrogates (invalid utf-8)", - "Array Blob object, Blob empty", - "Array Blob object, Blob NUL", - "Array Blob object, two Blobs", - "Object Blob object, Blob basic", - "Object Blob object, Blob unpaired high surrogate 
(invalid utf-8)", - "Object Blob object, Blob unpaired low surrogate (invalid utf-8)", - "Object Blob object, Blob paired surrogates (invalid utf-8)", - "Object Blob object, Blob empty", - "Object Blob object, Blob NUL", - "File basic", - "Serializing a non-serializable platform object fails", - "An object whose interface is deleted from the global must still deserialize", - "A subclass instance will deserialize as its closest serializable superclass", - "Resizable ArrayBuffer", - "Growable SharedArrayBuffer", - "Length-tracking TypedArray", - "Length-tracking DataView", - "Serializing OOB TypedArray throws", - "Serializing OOB DataView throws", - "A subclass instance will be received as its closest transferable superclass", - "Resizable ArrayBuffer is transferable", - "Length-tracking TypedArray is transferable", - "Length-tracking DataView is transferable", - "Transferring OOB TypedArray throws", - "Transferring OOB DataView throws" - ], + "messagechannel.any.html": false, + "messagechannel.any.worker.html": false, "shared-array-buffers": { "no-coop-coep.https.any.html": false, "no-coop-coep.https.any.worker.html": false, From 504482dadd4d8cd9e4105d56ed86802906767f39 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 27 Apr 2023 22:36:49 +0100 Subject: [PATCH 071/320] fix(repl): print unhandled rejections and event errors (#18878) Fixes #8858. Fixes #8869. ``` $ target/debug/deno Deno 1.32.5 exit using ctrl+d, ctrl+c, or close() REPL is running with all permissions allowed. To specify permissions, run `deno repl` with allow flags. 
> Promise.reject(new Error("bar")); Promise { Error: bar at :2:16 } Uncaught (in promise) Error: bar at :2:16 > reportError(new Error("baz")); undefined Uncaught Error: baz at :2:13 > --- cli/tests/integration/repl_tests.rs | 30 ++++++++++++++++++++++++++++- cli/tools/repl/mod.rs | 19 +++++++++++++++++- cli/tools/repl/session.rs | 9 ++++----- core/inspector.rs | 29 ++++++++++++++++++++++++++++ core/ops_builtin_v8.rs | 3 +-- core/realm.rs | 10 ++++++++++ 6 files changed, 91 insertions(+), 9 deletions(-) diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index a473dc2006..d9966fe8ff 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -783,14 +783,42 @@ fn pty_tab_handler() { }); } +#[test] +fn repl_error() { + util::with_pty(&["repl"], |mut console| { + console.write_line("console.log(1);"); + console.expect_all(&["1", "undefined"]); + console.write_line(r#"throw new Error("foo");"#); + console.expect("Uncaught Error: foo"); + console.expect(" at "); + console.write_line("console.log(2);"); + console.expect("2"); + }); +} + +#[test] +fn repl_reject() { + util::with_pty(&["repl"], |mut console| { + console.write_line("console.log(1);"); + console.expect_all(&["1", "undefined"]); + console.write_line(r#"Promise.reject(new Error("foo"));"#); + console.expect("Promise { Error: foo"); + console.expect("Uncaught (in promise) Error: foo"); + console.expect(" at "); + console.write_line("console.log(2);"); + console.expect("2"); + }); +} + #[test] fn repl_report_error() { util::with_pty(&["repl"], |mut console| { console.write_line("console.log(1);"); console.expect_all(&["1", "undefined"]); - // TODO(nayeemrmn): The REPL should report event errors and rejections. 
console.write_line(r#"reportError(new Error("foo"));"#); console.expect("undefined"); + console.expect("Uncaught Error: foo"); + console.expect(" at "); console.write_line("console.log(2);"); console.expect("2"); }); diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index bfba627525..0a6d9b9e9d 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -7,6 +7,7 @@ use crate::colors; use crate::file_fetcher::FileFetcher; use crate::proc_state::ProcState; use deno_core::error::AnyError; +use deno_core::futures::StreamExt; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; use rustyline::error::ReadlineError; @@ -30,8 +31,11 @@ async fn read_line_and_poll( message_handler: &mut RustylineSyncMessageHandler, editor: ReplEditor, ) -> Result { + #![allow(clippy::await_holding_refcell_ref)] let mut line_fut = tokio::task::spawn_blocking(move || editor.readline()); let mut poll_worker = true; + let notifications_rc = repl_session.notifications.clone(); + let mut notifications = notifications_rc.borrow_mut(); loop { tokio::select! 
{ @@ -57,7 +61,20 @@ async fn read_line_and_poll( } poll_worker = true; - }, + } + message = notifications.next() => { + if let Some(message) = message { + let method = message.get("method").unwrap().as_str().unwrap(); + if method == "Runtime.exceptionThrown" { + let params = message.get("params").unwrap().as_object().unwrap(); + let exception_details = params.get("exceptionDetails").unwrap().as_object().unwrap(); + let text = exception_details.get("text").unwrap().as_str().unwrap(); + let exception = exception_details.get("exception").unwrap().as_object().unwrap(); + let description = exception.get("description").unwrap().as_str().unwrap(); + println!("{text} {description}"); + } + } + } _ = repl_session.run_event_loop(), if poll_worker => { poll_worker = false; } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index b2645097c4..6f8db6fcd8 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -1,5 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::cell::RefCell; +use std::rc::Rc; use std::sync::Arc; use crate::args::CliOptions; @@ -128,12 +130,9 @@ pub struct ReplSession { session: LocalInspectorSession, pub context_id: u64, pub language_server: ReplLanguageServer, + pub notifications: Rc>>, has_initialized_node_runtime: bool, referrer: ModuleSpecifier, - // FIXME(bartlomieju): this field should be used to listen - // for "exceptionThrown" notifications - #[allow(dead_code)] - notification_rx: UnboundedReceiver, } impl ReplSession { @@ -193,7 +192,7 @@ impl ReplSession { language_server, has_initialized_node_runtime: false, referrer, - notification_rx, + notifications: Rc::new(RefCell::new(notification_rx)), }; // inject prelude diff --git a/core/inspector.rs b/core/inspector.rs index b0a55cf12b..22d1501544 100644 --- a/core/inspector.rs +++ b/core/inspector.rs @@ -231,6 +231,35 @@ impl JsRuntimeInspector { .context_destroyed(context); } + pub fn exception_thrown( + &self, + scope: &mut HandleScope, + exception: v8::Local<'_, v8::Value>, + in_promise: bool, + ) { + let context = scope.get_current_context(); + let message = v8::Exception::create_message(scope, exception); + let stack_trace = message.get_stack_trace(scope).unwrap(); + let mut v8_inspector_ref = self.v8_inspector.borrow_mut(); + let v8_inspector = v8_inspector_ref.as_mut().unwrap(); + let stack_trace = v8_inspector.create_stack_trace(stack_trace); + v8_inspector.exception_thrown( + context, + if in_promise { + v8::inspector::StringView::from("Uncaught (in promise)".as_bytes()) + } else { + v8::inspector::StringView::from("Uncaught".as_bytes()) + }, + exception, + v8::inspector::StringView::from("".as_bytes()), + v8::inspector::StringView::from("".as_bytes()), + 0, + 0, + stack_trace, + 0, + ); + } + pub fn has_active_sessions(&self) -> bool { self.sessions.borrow().has_active_sessions() } diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index f4133f3b8e..67cf1222f1 100644 --- a/core/ops_builtin_v8.rs +++ 
b/core/ops_builtin_v8.rs @@ -715,8 +715,7 @@ fn op_dispatch_exception( let mut state = state_rc.borrow_mut(); if let Some(inspector) = &state.inspector { let inspector = inspector.borrow(); - // TODO(nayeemrmn): Send exception message to inspector sessions here. - + inspector.exception_thrown(scope, exception.v8_value, false); // This indicates that the op is being called from a REPL. Skip termination. if inspector.is_dispatching_message() { return; diff --git a/core/realm.rs b/core/realm.rs index 08a550294d..f907553f08 100644 --- a/core/realm.rs +++ b/core/realm.rs @@ -4,6 +4,7 @@ use crate::bindings; use crate::modules::ModuleCode; use crate::ops::OpCtx; use crate::runtime::exception_to_err_result; +use crate::JsRuntime; use anyhow::Error; use std::cell::RefCell; use std::collections::HashMap; @@ -288,6 +289,15 @@ impl<'s> JsRealmLocal<'s> { drop(context_state); let exception = v8::Local::new(scope, handle); + let state_rc = JsRuntime::state(scope); + let state = state_rc.borrow(); + if let Some(inspector) = &state.inspector { + let inspector = inspector.borrow(); + inspector.exception_thrown(scope, exception, true); + if inspector.has_blocking_sessions() { + return Ok(()); + } + } exception_to_err_result(scope, exception, true) } } From 683dbd7f3bdd91c33005b816ad26b82a4343931a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 28 Apr 2023 00:37:03 +0200 Subject: [PATCH 072/320] Revert "refactor: don't expose Deno[Deno.internal].core namespace" (#18881) Also conditionally disabled one test if there's not enough space on device. 
--- cli/tests/integration/bench_tests.rs | 2 +- cli/tests/integration/inspector_tests.rs | 1 - cli/tests/integration/js_unit_tests.rs | 1 - cli/tests/integration/npm_tests.rs | 2 +- cli/tests/integration/run_tests.rs | 95 ++++++++++++------------ cli/tests/integration/test_tests.rs | 4 +- cli/tests/unit/read_text_file_test.ts | 16 +++- cli/tools/test.rs | 2 +- core/01_core.js | 8 +- core/runtime.rs | 20 ----- runtime/js/99_main.js | 19 ++--- test_napi/cleanup_hook_test.js | 1 - test_napi/tests/napi_tests.rs | 1 - 13 files changed, 75 insertions(+), 97 deletions(-) diff --git a/cli/tests/integration/bench_tests.rs b/cli/tests/integration/bench_tests.rs index 2a12be9636..16ac5852ec 100644 --- a/cli/tests/integration/bench_tests.rs +++ b/cli/tests/integration/bench_tests.rs @@ -198,7 +198,7 @@ fn recursive_permissions_pledge() { let context = TestContext::default(); let output = context .new_command() - .args("bench --enable-testing-features-do-not-use bench/recursive_permissions_pledge.js") + .args("bench bench/recursive_permissions_pledge.js") .run(); output.assert_exit_code(1); assert_contains!( diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index 29d13cd462..cf66c4adc1 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -307,7 +307,6 @@ async fn inspector_break_on_first_line() { let child = util::deno_cmd() .arg("run") .arg(inspect_flag_with_unique_port("--inspect-brk")) - .arg("--enable-testing-features-do-not-use") .arg(script) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) diff --git a/cli/tests/integration/js_unit_tests.rs b/cli/tests/integration/js_unit_tests.rs index 0e94390e86..793f66b1e1 100644 --- a/cli/tests/integration/js_unit_tests.rs +++ b/cli/tests/integration/js_unit_tests.rs @@ -28,7 +28,6 @@ fn js_unit_tests() { .arg("--unstable") .arg("--location=http://js-unit-tests/foo/bar") .arg("--no-prompt") - 
.arg("--enable-testing-features-do-not-use") .arg("-A") .arg(util::tests_path().join("unit")) .spawn() diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs index 29f9054ba7..fad79e371f 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -118,7 +118,7 @@ itest!(dual_cjs_esm { }); itest!(child_process_fork_test { - args: "run -A --quiet --enable-testing-features-do-not-use npm/child_process_fork_test/main.ts", + args: "run -A --quiet npm/child_process_fork_test/main.ts", output: "npm/child_process_fork_test/main.out", envs: env_vars_for_npm_tests(), http_server: true, diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index d946b6a1c5..1ad8efb260 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -213,7 +213,7 @@ itest!(_038_checkjs { }); itest!(_042_dyn_import_evalcontext { - args: "run --quiet --allow-read --reload --enable-testing-features-do-not-use run/042_dyn_import_evalcontext.ts", + args: "run --quiet --allow-read --reload run/042_dyn_import_evalcontext.ts", output: "run/042_dyn_import_evalcontext.ts.out", }); @@ -1161,25 +1161,25 @@ itest!(exit_error42 { }); itest!(set_exit_code_0 { - args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_0.ts", + args: "run --no-check --unstable run/set_exit_code_0.ts", output_str: Some(""), exit_code: 0, }); itest!(set_exit_code_1 { - args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_1.ts", + args: "run --no-check --unstable run/set_exit_code_1.ts", output_str: Some(""), exit_code: 42, }); itest!(set_exit_code_2 { - args: "run --no-check --unstable --enable-testing-features-do-not-use run/set_exit_code_2.ts", + args: "run --no-check --unstable run/set_exit_code_2.ts", output_str: Some(""), exit_code: 42, }); itest!(op_exit_op_set_exit_code_in_worker { - args: "run --no-check --unstable --allow-read 
--enable-testing-features-do-not-use run/op_exit_op_set_exit_code_in_worker.ts", + args: "run --no-check --unstable --allow-read run/op_exit_op_set_exit_code_in_worker.ts", exit_code: 21, output_str: Some(""), }); @@ -1197,7 +1197,7 @@ itest!(heapstats { itest!(finalization_registry { args: - "run --quiet --unstable --enable-testing-features-do-not-use --v8-flags=--expose-gc run/finalization_registry.js", + "run --quiet --unstable --v8-flags=--expose-gc run/finalization_registry.js", output: "run/finalization_registry.js.out", }); @@ -2760,7 +2760,7 @@ itest!(long_data_url_formatting { }); itest!(eval_context_throw_dom_exception { - args: "run --enable-testing-features-do-not-use run/eval_context_throw_dom_exception.js", + args: "run run/eval_context_throw_dom_exception.js", output: "run/eval_context_throw_dom_exception.js.out", }); @@ -3122,115 +3122,115 @@ itest!(fetch_async_error_stack { }); itest!(unstable_ffi_1 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_1.js", + args: "run run/ffi/unstable_ffi_1.js", output: "run/ffi/unstable_ffi_1.js.out", exit_code: 70, }); itest!(unstable_ffi_2 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_2.js", + args: "run run/ffi/unstable_ffi_2.js", output: "run/ffi/unstable_ffi_2.js.out", exit_code: 70, }); itest!(unstable_ffi_3 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_3.js", + args: "run run/ffi/unstable_ffi_3.js", output: "run/ffi/unstable_ffi_3.js.out", exit_code: 70, }); itest!(unstable_ffi_4 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_4.js", + args: "run run/ffi/unstable_ffi_4.js", output: "run/ffi/unstable_ffi_4.js.out", exit_code: 70, }); itest!(unstable_ffi_5 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_5.js", + args: "run run/ffi/unstable_ffi_5.js", output: "run/ffi/unstable_ffi_5.js.out", exit_code: 70, }); itest!(unstable_ffi_6 { - args: "run --enable-testing-features-do-not-use 
run/ffi/unstable_ffi_6.js", + args: "run run/ffi/unstable_ffi_6.js", output: "run/ffi/unstable_ffi_6.js.out", exit_code: 70, }); itest!(unstable_ffi_7 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_7.js", + args: "run run/ffi/unstable_ffi_7.js", output: "run/ffi/unstable_ffi_7.js.out", exit_code: 70, }); itest!(unstable_ffi_8 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_8.js", + args: "run run/ffi/unstable_ffi_8.js", output: "run/ffi/unstable_ffi_8.js.out", exit_code: 70, }); itest!(unstable_ffi_9 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_9.js", + args: "run run/ffi/unstable_ffi_9.js", output: "run/ffi/unstable_ffi_9.js.out", exit_code: 70, }); itest!(unstable_ffi_10 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_10.js", + args: "run run/ffi/unstable_ffi_10.js", output: "run/ffi/unstable_ffi_10.js.out", exit_code: 70, }); itest!(unstable_ffi_11 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_11.js", + args: "run run/ffi/unstable_ffi_11.js", output: "run/ffi/unstable_ffi_11.js.out", exit_code: 70, }); itest!(unstable_ffi_12 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_12.js", + args: "run run/ffi/unstable_ffi_12.js", output: "run/ffi/unstable_ffi_12.js.out", exit_code: 70, }); itest!(unstable_ffi_13 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_13.js", + args: "run run/ffi/unstable_ffi_13.js", output: "run/ffi/unstable_ffi_13.js.out", exit_code: 70, }); itest!(unstable_ffi_14 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_14.js", + args: "run run/ffi/unstable_ffi_14.js", output: "run/ffi/unstable_ffi_14.js.out", exit_code: 70, }); itest!(unstable_ffi_15 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_15.js", + args: "run run/ffi/unstable_ffi_15.js", output: "run/ffi/unstable_ffi_15.js.out", exit_code: 70, }); 
itest!(unstable_ffi_16 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_16.js", + args: "run run/ffi/unstable_ffi_16.js", output: "run/ffi/unstable_ffi_16.js.out", exit_code: 70, }); itest!(unstable_ffi_17 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_17.js", + args: "run run/ffi/unstable_ffi_17.js", output: "run/ffi/unstable_ffi_17.js.out", exit_code: 70, }); itest!(unstable_ffi_18 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_18.js", + args: "run run/ffi/unstable_ffi_18.js", output: "run/ffi/unstable_ffi_18.js.out", exit_code: 70, }); itest!(unstable_ffi_19 { - args: "run --enable-testing-features-do-not-use run/ffi/unstable_ffi_19.js", + args: "run run/ffi/unstable_ffi_19.js", output: "run/ffi/unstable_ffi_19.js.out", exit_code: 70, }); @@ -3253,7 +3253,7 @@ itest!(event_listener_error_handled { // https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 itest!(event_listener_error_immediate_exit { - args: "run --quiet --enable-testing-features-do-not-use run/event_listener_error_immediate_exit.ts", + args: "run --quiet run/event_listener_error_immediate_exit.ts", output: "run/event_listener_error_immediate_exit.ts.out", exit_code: 1, }); @@ -3261,7 +3261,7 @@ itest!(event_listener_error_immediate_exit { // https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 itest!(event_listener_error_immediate_exit_worker { args: - "run --quiet --unstable -A --enable-testing-features-do-not-use run/event_listener_error_immediate_exit_worker.ts", + "run --quiet --unstable -A run/event_listener_error_immediate_exit_worker.ts", output: "run/event_listener_error_immediate_exit_worker.ts.out", exit_code: 1, }); @@ -4368,24 +4368,22 @@ fn permission_prompt_strips_ansi_codes_and_control_chars() { ) }); - util::with_pty( - &["repl", "--enable-testing-features-do-not-use"], - |mut console| { - console.write_line_raw(r#"const boldANSI = "\u001b[1m";"#); - console.expect("undefined"); 
- console.write_line_raw(r#"const unboldANSI = "\u001b[22m";"#); - console.expect("undefined"); - console.write_line_raw(r#"const prompt = `┌ ⚠️ ${boldANSI}Deno requests run access to "echo"${unboldANSI}\n ├ Requested by \`Deno.Command().output()`"#); - console.expect("undefined"); - console.write_line_raw(r#"const moveANSIUp = "\u001b[1A";"#); - console.expect("undefined"); - console.write_line_raw(r#"const clearANSI = "\u001b[2K";"#); - console.expect("undefined"); - console.write_line_raw(r#"const moveANSIStart = "\u001b[1000D";"#); - console.expect("undefined"); + util::with_pty(&["repl"], |mut console| { + console.write_line_raw(r#"const boldANSI = "\u001b[1m";"#); + console.expect("undefined"); + console.write_line_raw(r#"const unboldANSI = "\u001b[22m";"#); + console.expect("undefined"); + console.write_line_raw(r#"const prompt = `┌ ⚠️ ${boldANSI}Deno requests run access to "echo"${unboldANSI}\n ├ Requested by \`Deno.Command().output()`"#); + console.expect("undefined"); + console.write_line_raw(r#"const moveANSIUp = "\u001b[1A";"#); + console.expect("undefined"); + console.write_line_raw(r#"const clearANSI = "\u001b[2K";"#); + console.expect("undefined"); + console.write_line_raw(r#"const moveANSIStart = "\u001b[1000D";"#); + console.expect("undefined"); - console.write_line_raw( - r#"Deno[Deno.internal].core.ops.op_spawn_child({ + console.write_line_raw( + r#"Deno[Deno.internal].core.ops.op_spawn_child({ cmd: "cat", args: ["file.txt"], clearEnv: false, @@ -4399,11 +4397,10 @@ fn permission_prompt_strips_ansi_codes_and_control_chars() { signal: undefined, windowsRawArguments: false, }, moveANSIUp + clearANSI + moveANSIStart + prompt)"#, - ); + ); - console.expect(r#"┌ ⚠️ Deno requests run access to "cat""#); - }, - ); + console.expect(r#"┌ ⚠️ Deno requests run access to "cat""#); + }); } itest!(node_builtin_modules_ts { diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index 223c02e244..0dea3b8440 100644 --- 
a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -222,7 +222,7 @@ itest!(ops_sanitizer_timeout_failure { }); itest!(ops_sanitizer_multiple_timeout_tests { - args: "test --trace-ops --enable-testing-features-do-not-use test/ops_sanitizer_multiple_timeout_tests.ts", + args: "test --trace-ops test/ops_sanitizer_multiple_timeout_tests.ts", exit_code: 1, output: "test/ops_sanitizer_multiple_timeout_tests.out", }); @@ -396,7 +396,7 @@ fn recursive_permissions_pledge() { let context = TestContext::default(); let output = context .new_command() - .args("test --enable-testing-features-do-not-use test/recursive_permissions_pledge.js") + .args("test test/recursive_permissions_pledge.js") .run(); output.assert_exit_code(1); assert_contains!( diff --git a/cli/tests/unit/read_text_file_test.ts b/cli/tests/unit/read_text_file_test.ts index c40cb83e39..21b13c9281 100644 --- a/cli/tests/unit/read_text_file_test.ts +++ b/cli/tests/unit/read_text_file_test.ts @@ -164,7 +164,13 @@ Deno.test( const bytes = new Uint8Array(kStringMaxLengthPlusOne); const filePath = "cli/tests/testdata/too_big_a_file.txt"; - Deno.writeFileSync(filePath, bytes); + try { + Deno.writeFileSync(filePath, bytes); + } catch { + // NOTE(bartlomieju): writing a 0.5Gb file might be too much for CI, + // so skip running if writing fails. + return; + } assertThrows( () => { @@ -185,7 +191,13 @@ Deno.test( const bytes = new Uint8Array(kStringMaxLengthPlusOne); const filePath = "cli/tests/testdata/too_big_a_file_2.txt"; - await Deno.writeFile(filePath, bytes); + try { + await Deno.writeFile(filePath, bytes); + } catch { + // NOTE(bartlomieju): writing a 0.5Gb file might be too much for CI, + // so skip running if writing fails. 
+ return; + } await assertRejects( async () => { diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 3bc0e79aab..0bdcb88606 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -962,7 +962,7 @@ pub async fn test_specifier( if options.trace_ops { worker.js_runtime.execute_script_static( located_script_name!(), - "Deno[Deno.internal].enableOpCallTracing();", + "Deno[Deno.internal].core.enableOpCallTracing();", )?; } worker.dispatch_load_event(located_script_name!())?; diff --git a/core/01_core.js b/core/01_core.js index 7663db5d9f..a8bdeb2a86 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -453,6 +453,7 @@ BadResourcePrototype, Interrupted, InterruptedPrototype, + enableOpCallTracing, isOpCallTracingEnabled, opCallTraces, refOp, @@ -506,11 +507,8 @@ }); ObjectAssign(globalThis.__bootstrap, { core }); - ObjectAssign(globalThis.__bootstrap, { - internals: { - enableOpCallTracing, - }, - }); + const internals = {}; + ObjectAssign(globalThis.__bootstrap, { internals }); ObjectAssign(globalThis.Deno, { core }); // Direct bindings on `globalThis` diff --git a/core/runtime.rs b/core/runtime.rs index be777bdf7e..d88ddccacb 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -478,26 +478,6 @@ impl JsRuntime { } } } - // Cache bust plain JS (non-ES modules as well) - #[cfg(feature = "include_js_files_for_snapshotting")] - if snapshot_options != snapshot_util::SnapshotOptions::None { - let js_sources = options - .extensions - .iter() - .flat_map(|ext| match ext.get_js_sources() { - Some(s) => s.to_owned(), - None => vec![], - }) - .collect::>(); - for source in js_sources { - use crate::ExtensionFileSourceCode; - if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = - &source.code - { - println!("cargo:rerun-if-changed={}", path.display()) - } - } - } Rc::new(crate::modules::ExtModuleLoader::new( options.module_loader, diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 914940f5cf..fa16cc1f40 100644 --- a/runtime/js/99_main.js +++ 
b/runtime/js/99_main.js @@ -391,6 +391,12 @@ function promiseRejectMacrotaskCallback() { let hasBootstrapped = false; // Set up global properties shared by main and worker runtime. ObjectDefineProperties(globalThis, windowOrWorkerGlobalScope); +// FIXME(bartlomieju): temporarily add whole `Deno.core` to +// `Deno[Deno.internal]` namespace. It should be removed and only necessary +// methods should be left there. +ObjectAssign(internals, { + core, +}); const internalSymbol = Symbol("Deno.internal"); const finalDenoNs = { internal: internalSymbol, @@ -422,7 +428,7 @@ function bootstrapMainRuntime(runtimeOptions) { 13: v8Version, 14: userAgent, 15: inspectFlag, - 16: enableTestingFeaturesFlag, + // 16: enableTestingFeaturesFlag } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -497,12 +503,6 @@ function bootstrapMainRuntime(runtimeOptions) { ObjectAssign(finalDenoNs, denoNsUnstable); } - // Add `Deno[Deno.internal].core` namespace if - // `--enable-testing-features-do-not-use` flag is set. - if (enableTestingFeaturesFlag) { - ObjectAssign(internals, { core }); - } - // Setup `Deno` global - we're actually overriding already existing global // `Deno` with `Deno` namespace from "./deno.ts". ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); @@ -612,11 +612,6 @@ function bootstrapWorkerRuntime( noColor: util.readOnly(noColor), args: util.readOnly(ObjectFreeze(args)), }); - // Add `Deno[Deno.internal].core` namespace if - // `--enable-testing-features-do-not-use` flag is set. - if (enableTestingFeaturesFlag) { - ObjectAssign(internals, { core }); - } // Setup `Deno` global - we're actually overriding already // existing global `Deno` with `Deno` namespace from "./deno.ts". 
ObjectDefineProperty(globalThis, "Deno", util.readOnly(finalDenoNs)); diff --git a/test_napi/cleanup_hook_test.js b/test_napi/cleanup_hook_test.js index 15741b60a1..30ceae470c 100644 --- a/test_napi/cleanup_hook_test.js +++ b/test_napi/cleanup_hook_test.js @@ -12,7 +12,6 @@ if (import.meta.main) { const { stdout, stderr, code } = await new Deno.Command(Deno.execPath(), { args: [ "run", - "--enable-testing-features-do-not-use", "--allow-read", "--allow-run", "--allow-ffi", diff --git a/test_napi/tests/napi_tests.rs b/test_napi/tests/napi_tests.rs index 722dc94177..747f6aa276 100644 --- a/test_napi/tests/napi_tests.rs +++ b/test_napi/tests/napi_tests.rs @@ -31,7 +31,6 @@ fn napi_tests() { .arg("--allow-env") .arg("--allow-ffi") .arg("--allow-run") - .arg("--enable-testing-features-do-not-use") .spawn() .unwrap() .wait_with_output() From 39ece1fe0ddacc2cbf182403c9e7085bc01df5a6 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Fri, 28 Apr 2023 01:18:57 +0200 Subject: [PATCH 073/320] 1.33.0 (#18879) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: bartlomieju Co-authored-by: Bartek Iwańczuk --- Cargo.lock | 52 ++++++++++++++++---------------- Cargo.toml | 50 +++++++++++++++--------------- Releases.md | 40 ++++++++++++++++++++++++ bench_util/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- cli/deno_std.rs | 2 +- cli/napi/sym/Cargo.toml | 2 +- core/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 +- ext/webstorage/Cargo.toml | 
2 +- ops/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- serde_v8/Cargo.toml | 2 +- 30 files changed, 118 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 483d57340b..fad0614642 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -696,7 +696,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5" [[package]] name = "deno" -version = "1.32.5" +version = "1.33.0" dependencies = [ "async-trait", "atty", @@ -818,7 +818,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.93.0" +version = "0.94.0" dependencies = [ "bencher", "deno_core", @@ -828,7 +828,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.93.0" +version = "0.94.0" dependencies = [ "async-trait", "deno_core", @@ -838,7 +838,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.31.0" +version = "0.32.0" dependencies = [ "async-trait", "deno_core", @@ -850,14 +850,14 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.99.0" +version = "0.100.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.181.0" +version = "0.182.0" dependencies = [ "anyhow", "bytes", @@ -882,7 +882,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.113.0" +version = "0.114.0" dependencies = [ "aes", "aes-gcm", @@ -950,7 +950,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.123.0" +version = "0.124.0" dependencies = [ "bytes", "data-url", @@ -967,7 +967,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.86.0" +version = "0.87.0" dependencies = [ "deno_core", "dlopen", @@ -982,7 +982,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.9.0" +version = "0.10.0" dependencies = [ "async-trait", "deno_core", @@ -1022,7 +1022,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.94.0" +version = "0.95.0" dependencies = [ "async-compression", "base64 0.13.1", @@ -1055,7 +1055,7 @@ dependencies = [ [[package]] name = "deno_io" 
-version = "0.9.0" +version = "0.10.0" dependencies = [ "deno_core", "nix", @@ -1066,7 +1066,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "async-trait", @@ -1120,7 +1120,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.29.0" +version = "0.30.0" dependencies = [ "deno_core", "libloading", @@ -1128,7 +1128,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.91.0" +version = "0.92.0" dependencies = [ "deno_core", "deno_tls", @@ -1143,7 +1143,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.36.0" +version = "0.37.0" dependencies = [ "aes", "cbc", @@ -1212,7 +1212,7 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.59.0" +version = "0.60.0" dependencies = [ "lazy-regex", "once_cell", @@ -1230,7 +1230,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.107.0" +version = "0.108.0" dependencies = [ "atty", "console_static_text", @@ -1311,7 +1311,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.86.0" +version = "0.87.0" dependencies = [ "deno_core", "once_cell", @@ -1325,7 +1325,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.99.0" +version = "0.100.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1337,7 +1337,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.130.0" +version = "0.131.0" dependencies = [ "async-trait", "base64-simd", @@ -1355,7 +1355,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.99.0" +version = "0.100.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1363,7 +1363,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.104.0" +version = "0.105.0" dependencies = [ "bytes", "deno_core", @@ -1379,7 +1379,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.94.0" +version = "0.95.0" dependencies = [ "deno_core", "deno_web", @@ -2981,7 +2981,7 @@ dependencies = [ [[package]] name = "napi_sym" 
-version = "0.29.0" +version = "0.30.0" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", @@ -4250,7 +4250,7 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.92.0" +version = "0.93.0" dependencies = [ "bencher", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 4a7d051f09..eec64e1ce8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,12 +44,12 @@ repository = "https://github.com/denoland/deno" v8 = { version = "0.71.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } -deno_core = { version = "0.181.0", path = "./core" } -deno_ops = { version = "0.59.0", path = "./ops" } -serde_v8 = { version = "0.92.0", path = "./serde_v8" } -deno_runtime = { version = "0.107.0", path = "./runtime" } -napi_sym = { version = "0.29.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.93.0", path = "./bench_util" } +deno_core = { version = "0.182.0", path = "./core" } +deno_ops = { version = "0.60.0", path = "./ops" } +serde_v8 = { version = "0.93.0", path = "./serde_v8" } +deno_runtime = { version = "0.108.0", path = "./runtime" } +napi_sym = { version = "0.30.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.94.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.13.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } @@ -57,25 +57,25 @@ deno_npm = "0.3.0" deno_semver = "0.2.1" # exts -deno_broadcast_channel = { version = "0.93.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.31.0", path = "./ext/cache" } -deno_console = { version = "0.99.0", path = "./ext/console" } -deno_crypto = { version = "0.113.0", path = "./ext/crypto" } -deno_fetch = { version = "0.123.0", path = "./ext/fetch" } -deno_ffi = { version = "0.86.0", path = "./ext/ffi" } -deno_fs = { version = "0.9.0", path = "./ext/fs" } -deno_http = { version = "0.94.0", path = "./ext/http" } -deno_io = { version = "0.9.0", path = "./ext/io" } -deno_net = { version = 
"0.91.0", path = "./ext/net" } -deno_node = { version = "0.36.0", path = "./ext/node" } -deno_kv = { version = "0.7.0", path = "./ext/kv" } -deno_tls = { version = "0.86.0", path = "./ext/tls" } -deno_url = { version = "0.99.0", path = "./ext/url" } -deno_web = { version = "0.130.0", path = "./ext/web" } -deno_webidl = { version = "0.99.0", path = "./ext/webidl" } -deno_websocket = { version = "0.104.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.94.0", path = "./ext/webstorage" } -deno_napi = { version = "0.29.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.94.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.32.0", path = "./ext/cache" } +deno_console = { version = "0.100.0", path = "./ext/console" } +deno_crypto = { version = "0.114.0", path = "./ext/crypto" } +deno_fetch = { version = "0.124.0", path = "./ext/fetch" } +deno_ffi = { version = "0.87.0", path = "./ext/ffi" } +deno_fs = { version = "0.10.0", path = "./ext/fs" } +deno_http = { version = "0.95.0", path = "./ext/http" } +deno_io = { version = "0.10.0", path = "./ext/io" } +deno_net = { version = "0.92.0", path = "./ext/net" } +deno_node = { version = "0.37.0", path = "./ext/node" } +deno_kv = { version = "0.8.0", path = "./ext/kv" } +deno_tls = { version = "0.87.0", path = "./ext/tls" } +deno_url = { version = "0.100.0", path = "./ext/url" } +deno_web = { version = "0.131.0", path = "./ext/web" } +deno_webidl = { version = "0.100.0", path = "./ext/webidl" } +deno_websocket = { version = "0.105.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.95.0", path = "./ext/webstorage" } +deno_napi = { version = "0.30.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" diff --git a/Releases.md b/Releases.md index ebc49b8f51..57b908972a 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,46 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.33.0 / 
2023.04.27 + +- BREAKING(unstable): remove "Deno.serve(handler, options)" overload (#18759) +- Revert "chore(ext/websocket): Add autobahn|testsuite fuzzingclient (#… + (#18856) +- feat(bench): add `--no-run` flag (#18433) +- feat(cli): don't check permissions for statically analyzable dynamic imports + (#18713) +- feat(cli): flatten deno.json configuaration (#17799) +- feat(ext/ffi): support marking symbols as optional (#18529) +- feat(ext/http): Rework Deno.serve using hyper 1.0-rc3 (#18619) +- feat(ext/kv): add more atomic operation helpers (#18854) +- feat(ext/kv): return ok bool from atomic commit (#18873) +- feat(ext/url): `URL.canParse` (#18286) +- feat(lint): add `Deno.run` to `no-deprecated-deno-api` (#18869) +- feat(node/crypto): Elliptic Curve Diffie-Hellman (ECDH) support (#18832) +- feat(node/http): implement ClientRequest.setTimeout() (#18783) +- feat(task): introduce built-in `unset` command to `deno task` (#18606) +- feat: Deprecate Deno.run API in favor of Deno.Command (#17630) (#18866) +- fix(compile): write bytes directly to output file (#18777) +- fix(core): Wrap safe collections' argument of primordials (#18750) +- fix(coverage): exclude test files (#18748) +- fix(dts): `URLPatternComponentResult` groups should have possibly undefined + key values (#18643) +- fix(ext/node): add crypto.sign|verify methods (#18765) +- fix(ext/node): fix hash.flush (#18818) +- fix(ext/node): implement asymmetric keygen (#18651) +- fix(ext/node): improve vm.runInThisContext (#18767) +- fix(ext/node): prime generation (#18861) +- fix(lsp): show dependency errors for repeated imports (#18807) +- fix(npm): only include top level packages in top level node_modules directory + (#18824) +- fix(test): allow explicit undefined for boolean test options (#18786) +- fix(test): handle dispatched exceptions from test functions (#18853) +- perf(ext/http): avoid spread arg deopt in op_http_wait (#18850) +- perf(ext/http): optimize away code based on callback length (#18849) +- 
perf(ext/http): optimize for zero or one-packet response streams (#18834) +- perf(ext/http): use smi for slab IDs (#18848) +- perf(ext/websocket): various performance improvements (#18862) + ### 1.32.5 / 2023.04.18 - feat(UNSTABLE/kv): AtomicOperation#sum (#18704) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index eccf73a0c7..876e9e3640 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a75fb2dcef..1af4655ca3 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.32.5" +version = "1.33.0" authors.workspace = true default-run = "deno" edition.workspace = true diff --git a/cli/deno_std.rs b/cli/deno_std.rs index 4203c02f0b..826d73e7a8 100644 --- a/cli/deno_std.rs +++ b/cli/deno_std.rs @@ -2,4 +2,4 @@ // WARNING: Ensure this is the only deno_std version reference as this // is automatically updated by the version bump workflow. 
-pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.184.0/"; +pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.185.0/"; diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index 41586b1b10..0dac0b1c61 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.29.0" +version = "0.30.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/core/Cargo.toml b/core/Cargo.toml index f77ae7adc0..9c8a7eff77 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.181.0" +version = "0.182.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 005806aff9..ad6d092670 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 57d025bba3..4be593ea26 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.31.0" +version = "0.32.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index be3bd94698..7174090433 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.99.0" +version = "0.100.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 7f0d1fdf0e..56a9d42aad 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.113.0" +version = "0.114.0" 
authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 754a8d3325..443196524b 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.123.0" +version = "0.124.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 8486754f45..f3e144fb2e 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.86.0" +version = "0.87.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index d6976d3744..89e1e0306d 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.9.0" +version = "0.10.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index bb965d9b25..2a05e7e228 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index ceb4e3f954..60e868883b 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.9.0" +version = "0.10.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 36155cd8ea..a15e5f7a2f 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.7.0" +version = "0.8.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 3b563a5233..cac3e48f40 100644 --- a/ext/napi/Cargo.toml +++ 
b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.29.0" +version = "0.30.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index 6bab80cc79..a00e1b39d5 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.91.0" +version = "0.92.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 14928db307..fc30c911af 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.36.0" +version = "0.37.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 3b01da0a31..68248956cb 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.86.0" +version = "0.87.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 9ded3c0ad7..635af056cf 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.99.0" +version = "0.100.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index 4d5a412fbd..a1dab71a3b 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.130.0" +version = "0.131.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 939dfaeafe..57124d45bc 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.99.0" +version = "0.100.0" authors.workspace = true edition.workspace = true license.workspace = true diff 
--git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 82be1d8636..bfc9e6133e 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.104.0" +version = "0.105.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 215986c24a..d3a323612c 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index a059a9580d..22d14f72e2 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.59.0" +version = "0.60.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 20cbda0bfb..e1a23a60dc 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.107.0" +version = "0.108.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index 530b938b63..86d6aaf991 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.92.0" +version = "0.93.0" authors.workspace = true edition.workspace = true license.workspace = true From de5bd4e536bdbd6aa3621c97a961c3c926043d1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 28 Apr 2023 05:39:42 +0200 Subject: [PATCH 074/320] build: define features for 'hyper' crate (#18882) Fixes the "publish" CI step. Somehow neither `cargo build` nor `cargo clippy` don't complain about it. 
--- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index eec64e1ce8..ebc21ff352 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -98,7 +98,7 @@ futures = "0.3.21" hex = "0.4" http = "0.2.9" httparse = "1.8.0" -hyper = "0.14.26" +hyper = { version = "0.14.26", features = ["runtime", "http1"] } indexmap = { version = "1.9.2", features = ["serde"] } libc = "0.2.126" log = "=0.4.17" From 84b921555fa481a0a2c4cffe5c897fd1c87485b7 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Fri, 28 Apr 2023 14:26:21 +0200 Subject: [PATCH 075/320] fix(ext/fetch): subview Uint8Array in Req/Resp (#18890) --- cli/tests/unit/fetch_test.ts | 16 ++++++++++++++ ext/fetch/22_body.js | 42 +++++++++++------------------------- 2 files changed, 28 insertions(+), 30 deletions(-) diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index bafb23c2a9..a92a7a0516 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -1893,3 +1893,19 @@ Deno.test( await server; }, ); + +Deno.test("Request with subarray TypedArray body", async () => { + const body = new Uint8Array([1, 2, 3, 4, 5]).subarray(1); + const req = new Request("https://example.com", { method: "POST", body }); + const actual = new Uint8Array(await req.arrayBuffer()); + const expected = new Uint8Array([2, 3, 4, 5]); + assertEquals(actual, expected); +}); + +Deno.test("Response with subarray TypedArray body", async () => { + const body = new Uint8Array([1, 2, 3, 4, 5]).subarray(1); + const req = new Response(body); + const actual = new Uint8Array(await req.arrayBuffer()); + const expected = new Uint8Array([2, 3, 4, 5]); + assertEquals(actual, expected); +}); diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 9dbd58fa4b..875ec0620f 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -38,7 +38,6 @@ import { const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, - ArrayBufferPrototypeGetByteLength, 
ArrayBufferIsView, ArrayPrototypeMap, DataViewPrototypeGetBuffer, @@ -394,44 +393,27 @@ function extractBody(object) { } } else if (ArrayBufferIsView(object)) { const tag = TypedArrayPrototypeGetSymbolToStringTag(object); - if (tag === "Uint8Array") { - // Fast(er) path for common case of Uint8Array - const copy = TypedArrayPrototypeSlice( - object, - TypedArrayPrototypeGetByteOffset(/** @type {Uint8Array} */ (object)), - TypedArrayPrototypeGetByteLength(/** @type {Uint8Array} */ (object)), - ); - source = copy; - } else if (tag !== undefined) { + if (tag !== undefined) { // TypedArray - const copy = TypedArrayPrototypeSlice( - new Uint8Array( + if (tag !== "Uint8Array") { + // TypedArray, unless it's Uint8Array + object = new Uint8Array( TypedArrayPrototypeGetBuffer(/** @type {Uint8Array} */ (object)), TypedArrayPrototypeGetByteOffset(/** @type {Uint8Array} */ (object)), TypedArrayPrototypeGetByteLength(/** @type {Uint8Array} */ (object)), - ), - ); - source = copy; + ); + } } else { // DataView - const copy = TypedArrayPrototypeSlice( - new Uint8Array( - DataViewPrototypeGetBuffer(/** @type {DataView} */ (object)), - DataViewPrototypeGetByteOffset(/** @type {DataView} */ (object)), - DataViewPrototypeGetByteLength(/** @type {DataView} */ (object)), - ), + object = new Uint8Array( + DataViewPrototypeGetBuffer(/** @type {DataView} */ (object)), + DataViewPrototypeGetByteOffset(/** @type {DataView} */ (object)), + DataViewPrototypeGetByteLength(/** @type {DataView} */ (object)), ); - source = copy; } + source = TypedArrayPrototypeSlice(object); } else if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, object)) { - const copy = TypedArrayPrototypeSlice( - new Uint8Array( - object, - 0, - ArrayBufferPrototypeGetByteLength(object), - ), - ); - source = copy; + source = TypedArrayPrototypeSlice(new Uint8Array(object)); } else if (ObjectPrototypeIsPrototypeOf(FormDataPrototype, object)) { const res = formDataToBlob(object); stream = res.stream(); From 
0b296c6378c46c18de7c3838b2a3e1d13eb9bd87 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Fri, 28 Apr 2023 14:21:55 +0100 Subject: [PATCH 076/320] fix(repl): don't panic on undefined exception (#18888) Fixes regression from #18878 where `Promise.reject()`, `Promise.reject(undefined)` and `reportError(undefined)` panic in the REPL. Fixes `throw undefined` printing `Uncaught Unknown exception` instead of `Uncaught undefined`. --- cli/tests/integration/repl_tests.rs | 14 ++++++++++++++ cli/tools/repl/mod.rs | 2 +- cli/tools/repl/session.rs | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index d9966fe8ff..f8987e20b6 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -824,6 +824,20 @@ fn repl_report_error() { }); } +#[test] +fn repl_error_undefined() { + util::with_pty(&["repl"], |mut console| { + console.write_line(r#"throw undefined;"#); + console.expect("Uncaught undefined"); + console.write_line(r#"Promise.reject();"#); + console.expect("Promise { undefined }"); + console.expect("Uncaught (in promise) undefined"); + console.write_line(r#"reportError(undefined);"#); + console.expect("undefined"); + console.expect("Uncaught undefined"); + }); +} + #[test] fn pty_aggregate_error() { util::with_pty(&["repl"], |mut console| { diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index 0a6d9b9e9d..59b79ce86c 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -70,7 +70,7 @@ async fn read_line_and_poll( let exception_details = params.get("exceptionDetails").unwrap().as_object().unwrap(); let text = exception_details.get("text").unwrap().as_str().unwrap(); let exception = exception_details.get("exception").unwrap().as_object().unwrap(); - let description = exception.get("description").unwrap().as_str().unwrap(); + let description = exception.get("description").and_then(|d| d.as_str()).unwrap_or("undefined"); 
println!("{text} {description}"); } } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 6f8db6fcd8..b8daf505bd 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -260,7 +260,7 @@ impl ReplSession { let description = match exception_details.exception { Some(exception) => exception .description - .unwrap_or_else(|| "Unknown exception".to_string()), + .unwrap_or_else(|| "undefined".to_string()), None => "Unknown exception".to_string(), }; EvaluationOutput::Error(format!( From 142c1ab9fcb6d88f36a8f9f096a59393525da7d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 28 Apr 2023 16:48:00 +0200 Subject: [PATCH 077/320] fix(ext/websocket): restore op_ws_send_ping (#18891) Co-authored-by: Divy Srivastava --- cli/js/40_testing.js | 6 +++--- cli/tests/integration/run_tests.rs | 1 - .../testdata/run/websocket_server_idletimeout.ts | 4 ++-- ext/websocket/lib.rs | 15 +++++++++++++++ 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/cli/js/40_testing.js b/cli/js/40_testing.js index 1464483563..555f5f1fe7 100644 --- a/cli/js/40_testing.js +++ b/cli/js/40_testing.js @@ -83,8 +83,8 @@ const OP_DETAILS = { "op_dns_resolve": ["resolve a DNS name", "awaiting the result of a `Deno.resolveDns` call"], "op_fdatasync_async": ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fdatasync` call"], "op_fetch_send": ["send a HTTP request", "awaiting the result of a `fetch` call"], - "op_ffi_call_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"] , - "op_ffi_call_ptr_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"], + "op_ffi_call_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"], + "op_ffi_call_ptr_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"], "op_flock_async": ["lock a file", "awaiting the result of a `Deno.flock` call"], "op_fs_events_poll": ["get the next 
file system event", "breaking out of a for await loop looping over `Deno.FsEvents`"], "op_fstat_async": ["get file metadata", "awaiting the result of a `Deno.File#fstat` call"], @@ -124,7 +124,7 @@ const OP_DETAILS = { "op_tls_start": ["start a TLS connection", "awaiting a `Deno.startTls` call"], "op_truncate_async": ["truncate a file", "awaiting the result of a `Deno.truncate` call"], "op_utime_async": ["change file timestamps", "awaiting the result of a `Deno.utime` call"], - "op_worker_recv_message": ["receive a message from a web worker", "terminating a `Worker`"], + "op_worker_recv_message": ["receive a message from a web worker", "terminating a `Worker`"], "op_ws_close": ["close a WebSocket", "awaiting until the `close` event is emitted on a `WebSocket`, or the `WebSocketStream#closed` promise resolves"], "op_ws_create": ["create a WebSocket", "awaiting until the `open` event is emitted on a `WebSocket`, or the result of a `WebSocketStream#connection` promise"], "op_ws_next_event": ["receive the next message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"], diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index 1ad8efb260..26aacc6fdc 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -4228,7 +4228,6 @@ async fn websocket_server_multi_field_connection_header() { // TODO(bartlomieju): this should use `deno run`, not `deno test`; but the // test hangs then. 
https://github.com/denoland/deno/issues/14283 #[tokio::test] -#[ignore] async fn websocket_server_idletimeout() { let script = util::testdata_path().join("run/websocket_server_idletimeout.ts"); diff --git a/cli/tests/testdata/run/websocket_server_idletimeout.ts b/cli/tests/testdata/run/websocket_server_idletimeout.ts index 9ae6698cbf..211b5f6ea9 100644 --- a/cli/tests/testdata/run/websocket_server_idletimeout.ts +++ b/cli/tests/testdata/run/websocket_server_idletimeout.ts @@ -1,5 +1,5 @@ -import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; -import { deferred } from "../../../test_util/std/async/deferred.ts"; +import { assertEquals } from "../../../../test_util/std/testing/asserts.ts"; +import { deferred } from "../../../../test_util/std/async/deferred.ts"; const errorDeferred = deferred(); const closeDeferred = deferred(); diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 9ea341fbb6..d2ec14ec3e 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -406,6 +406,20 @@ pub async fn op_ws_send_pong( resource.write_frame(Frame::pong(vec![])).await } +#[op] +pub async fn op_ws_send_ping( + state: Rc>, + rid: ResourceId, +) -> Result<(), AnyError> { + let resource = state + .borrow_mut() + .resource_table + .get::(rid)?; + resource + .write_frame(Frame::new(true, OpCode::Ping, None, vec![])) + .await +} + #[op(deferred)] pub async fn op_ws_close( state: Rc>, @@ -499,6 +513,7 @@ deno_core::extension!(deno_websocket, op_ws_next_event, op_ws_send_binary, op_ws_send_text, + op_ws_send_ping, op_ws_send_pong, op_ws_server_create, ], From 8739519ebce4127af3fd3a0648d11e021b0084f9 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Fri, 28 Apr 2023 21:30:27 +0530 Subject: [PATCH 078/320] fix(ext/websocket): client connect URI (#18892) --- ext/websocket/lib.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index d2ec14ec3e..2ce141fc92 100644 --- 
a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -184,7 +184,12 @@ where let root_cert_store = state.borrow().borrow::().0.clone(); let user_agent = state.borrow().borrow::().0.clone(); let uri: Uri = url.parse()?; - let mut request = Request::builder().method(Method::GET).uri(&uri); + let mut request = Request::builder().method(Method::GET).uri( + uri + .path_and_query() + .ok_or(type_error("Missing path in url".to_string()))? + .as_str(), + ); let authority = uri.authority().unwrap().as_str(); let host = authority @@ -195,7 +200,7 @@ where .header("User-Agent", user_agent) .header("Host", host) .header(UPGRADE, "websocket") - .header(CONNECTION, "upgrade") + .header(CONNECTION, "Upgrade") .header( "Sec-WebSocket-Key", fastwebsockets::handshake::generate_key(), From 6369098ad74a263e461478f94782bc469de15468 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Fri, 28 Apr 2023 21:14:26 +0200 Subject: [PATCH 079/320] chore: forward v1.33.1 release commit to main (#18897) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bartek Iwańczuk --- Cargo.lock | 52 +++++++++++++-------------- Cargo.toml | 50 +++++++++++++------------- Releases.md | 7 ++++ bench_util/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- cli/napi/sym/Cargo.toml | 2 +- core/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 +- ext/webstorage/Cargo.toml | 2 +- ops/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- serde_v8/Cargo.toml | 2 +- 
tools/release/release_doc_template.md | 13 +++++++ 30 files changed, 97 insertions(+), 77 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fad0614642..3fc8f0d688 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -696,7 +696,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5" [[package]] name = "deno" -version = "1.33.0" +version = "1.33.1" dependencies = [ "async-trait", "atty", @@ -818,7 +818,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.94.0" +version = "0.95.0" dependencies = [ "bencher", "deno_core", @@ -828,7 +828,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.94.0" +version = "0.95.0" dependencies = [ "async-trait", "deno_core", @@ -838,7 +838,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.32.0" +version = "0.33.0" dependencies = [ "async-trait", "deno_core", @@ -850,14 +850,14 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.100.0" +version = "0.101.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.182.0" +version = "0.183.0" dependencies = [ "anyhow", "bytes", @@ -882,7 +882,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.114.0" +version = "0.115.0" dependencies = [ "aes", "aes-gcm", @@ -950,7 +950,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.124.0" +version = "0.125.0" dependencies = [ "bytes", "data-url", @@ -967,7 +967,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.87.0" +version = "0.88.0" dependencies = [ "deno_core", "dlopen", @@ -982,7 +982,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.10.0" +version = "0.11.0" dependencies = [ "async-trait", "deno_core", @@ -1022,7 +1022,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.95.0" +version = "0.96.0" dependencies = [ "async-compression", "base64 0.13.1", @@ -1055,7 +1055,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.10.0" 
+version = "0.11.0" dependencies = [ "deno_core", "nix", @@ -1066,7 +1066,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.8.0" +version = "0.9.0" dependencies = [ "anyhow", "async-trait", @@ -1120,7 +1120,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.30.0" +version = "0.31.0" dependencies = [ "deno_core", "libloading", @@ -1128,7 +1128,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.92.0" +version = "0.93.0" dependencies = [ "deno_core", "deno_tls", @@ -1143,7 +1143,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.37.0" +version = "0.38.0" dependencies = [ "aes", "cbc", @@ -1212,7 +1212,7 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.60.0" +version = "0.61.0" dependencies = [ "lazy-regex", "once_cell", @@ -1230,7 +1230,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.108.0" +version = "0.109.0" dependencies = [ "atty", "console_static_text", @@ -1311,7 +1311,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.87.0" +version = "0.88.0" dependencies = [ "deno_core", "once_cell", @@ -1325,7 +1325,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.100.0" +version = "0.101.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1337,7 +1337,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.131.0" +version = "0.132.0" dependencies = [ "async-trait", "base64-simd", @@ -1355,7 +1355,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.100.0" +version = "0.101.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1363,7 +1363,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.105.0" +version = "0.106.0" dependencies = [ "bytes", "deno_core", @@ -1379,7 +1379,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.95.0" +version = "0.96.0" dependencies = [ "deno_core", "deno_web", @@ -2981,7 +2981,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = 
"0.30.0" +version = "0.31.0" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", @@ -4250,7 +4250,7 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.93.0" +version = "0.94.0" dependencies = [ "bencher", "bytes", diff --git a/Cargo.toml b/Cargo.toml index ebc21ff352..2ab1df9223 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,12 +44,12 @@ repository = "https://github.com/denoland/deno" v8 = { version = "0.71.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } -deno_core = { version = "0.182.0", path = "./core" } -deno_ops = { version = "0.60.0", path = "./ops" } -serde_v8 = { version = "0.93.0", path = "./serde_v8" } -deno_runtime = { version = "0.108.0", path = "./runtime" } -napi_sym = { version = "0.30.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.94.0", path = "./bench_util" } +deno_core = { version = "0.183.0", path = "./core" } +deno_ops = { version = "0.61.0", path = "./ops" } +serde_v8 = { version = "0.94.0", path = "./serde_v8" } +deno_runtime = { version = "0.109.0", path = "./runtime" } +napi_sym = { version = "0.31.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.95.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.13.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } @@ -57,25 +57,25 @@ deno_npm = "0.3.0" deno_semver = "0.2.1" # exts -deno_broadcast_channel = { version = "0.94.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.32.0", path = "./ext/cache" } -deno_console = { version = "0.100.0", path = "./ext/console" } -deno_crypto = { version = "0.114.0", path = "./ext/crypto" } -deno_fetch = { version = "0.124.0", path = "./ext/fetch" } -deno_ffi = { version = "0.87.0", path = "./ext/ffi" } -deno_fs = { version = "0.10.0", path = "./ext/fs" } -deno_http = { version = "0.95.0", path = "./ext/http" } -deno_io = { version = "0.10.0", path = "./ext/io" } -deno_net = { version = "0.92.0", 
path = "./ext/net" } -deno_node = { version = "0.37.0", path = "./ext/node" } -deno_kv = { version = "0.8.0", path = "./ext/kv" } -deno_tls = { version = "0.87.0", path = "./ext/tls" } -deno_url = { version = "0.100.0", path = "./ext/url" } -deno_web = { version = "0.131.0", path = "./ext/web" } -deno_webidl = { version = "0.100.0", path = "./ext/webidl" } -deno_websocket = { version = "0.105.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.95.0", path = "./ext/webstorage" } -deno_napi = { version = "0.30.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.95.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.33.0", path = "./ext/cache" } +deno_console = { version = "0.101.0", path = "./ext/console" } +deno_crypto = { version = "0.115.0", path = "./ext/crypto" } +deno_fetch = { version = "0.125.0", path = "./ext/fetch" } +deno_ffi = { version = "0.88.0", path = "./ext/ffi" } +deno_fs = { version = "0.11.0", path = "./ext/fs" } +deno_http = { version = "0.96.0", path = "./ext/http" } +deno_io = { version = "0.11.0", path = "./ext/io" } +deno_net = { version = "0.93.0", path = "./ext/net" } +deno_node = { version = "0.38.0", path = "./ext/node" } +deno_kv = { version = "0.9.0", path = "./ext/kv" } +deno_tls = { version = "0.88.0", path = "./ext/tls" } +deno_url = { version = "0.101.0", path = "./ext/url" } +deno_web = { version = "0.132.0", path = "./ext/web" } +deno_webidl = { version = "0.101.0", path = "./ext/webidl" } +deno_websocket = { version = "0.106.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.96.0", path = "./ext/webstorage" } +deno_napi = { version = "0.31.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" diff --git a/Releases.md b/Releases.md index 57b908972a..ce28e5d6af 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,13 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.33.1 / 2023.04.28 + 
+- fix(ext/fetch): subview Uint8Array in Req/Resp (#18890) +- fix(ext/websocket): client connect URI (#18892) +- fix(ext/websocket): restore op_ws_send_ping (#18891) +- fix(repl): don't panic on undefined exception (#18888) + ### 1.33.0 / 2023.04.27 - BREAKING(unstable): remove "Deno.serve(handler, options)" overload (#18759) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index 876e9e3640..bc9c02cffa 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 1af4655ca3..ac3c840e2a 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.33.0" +version = "1.33.1" authors.workspace = true default-run = "deno" edition.workspace = true diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index 0dac0b1c61..e722892dc3 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.30.0" +version = "0.31.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/core/Cargo.toml b/core/Cargo.toml index 9c8a7eff77..1bb6f520da 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.182.0" +version = "0.183.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index ad6d092670..0bb15a19a0 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 4be593ea26..257b792a18 100644 --- 
a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.32.0" +version = "0.33.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index 7174090433..aff3557f17 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.100.0" +version = "0.101.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 56a9d42aad..eccd114aaa 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.114.0" +version = "0.115.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 443196524b..063ec23b6a 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.124.0" +version = "0.125.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index f3e144fb2e..12100326b8 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.87.0" +version = "0.88.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 89e1e0306d..655d479602 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.10.0" +version = "0.11.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 2a05e7e228..ea8a6eef78 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.95.0" +version = "0.96.0" authors.workspace = true 
edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 60e868883b..8562731814 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.10.0" +version = "0.11.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index a15e5f7a2f..3bead8be17 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.8.0" +version = "0.9.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index cac3e48f40..b2e6a999f4 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.30.0" +version = "0.31.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index a00e1b39d5..7948cd487f 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.92.0" +version = "0.93.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index fc30c911af..21716d7ca2 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.37.0" +version = "0.38.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 68248956cb..9ca0091500 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.87.0" +version = "0.88.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 635af056cf..da4d37c003 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ 
[package] name = "deno_url" -version = "0.100.0" +version = "0.101.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index a1dab71a3b..20cc2e5286 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.131.0" +version = "0.132.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 57124d45bc..2ae4948e74 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.100.0" +version = "0.101.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index bfc9e6133e..436c64c6bd 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.105.0" +version = "0.106.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index d3a323612c..52d12a9ccd 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.95.0" +version = "0.96.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index 22d14f72e2..05d4c2793d 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.60.0" +version = "0.61.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index e1a23a60dc..cc2494ce08 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.108.0" +version = "0.109.0" authors.workspace = true edition.workspace = true 
license.workspace = true diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index 86d6aaf991..ef94b1f284 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/tools/release/release_doc_template.md b/tools/release/release_doc_template.md index c0eb9e2140..0acd4137a1 100644 --- a/tools/release/release_doc_template.md +++ b/tools/release/release_doc_template.md @@ -162,6 +162,19 @@ verify on GitHub that everything looks correct. - [ ] Publish the release on Github +- [ ] Run the + https://github.com/denoland/dotland/actions/workflows/update_versions.yml + workflow. + - [ ] This should open a PR. Review and merge it. + +
+ Failure Steps + + 1. Update https://github.com/denoland/dotland/blob/main/versions.json + manually. + 2. Open a PR and merge. +
+ - [ ] Run the https://github.com/denoland/dotcom/actions/workflows/update_versions.yml workflow. From 10ae5ee26557107b22524b1a84ebb56ed7d23fb4 Mon Sep 17 00:00:00 2001 From: Igor Zinkovsky Date: Fri, 28 Apr 2023 12:16:17 -0700 Subject: [PATCH 080/320] fix(ext/io) several sync fs fixes (#18886) 2 fixes related to sync fs: * update the 2 sync methods on `Resource` trait to take `Rc` (consistent with other methods) * fix a bug in `StdFileResource::with_inner_and_metadata`, which currently can trigger a panic if a sync method is called on a file with a pending async operation. This could happen in the code path where `File::try_clone` [fails](https://github.com/denoland/deno/blob/39ece1fe0ddacc2cbf182403c9e7085bc01df5a6/ext/io/lib.rs#L485-L489). --- core/resources.rs | 4 ++-- ext/io/lib.rs | 15 +++++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/core/resources.rs b/core/resources.rs index 6ca86e10b6..84e6847fc6 100644 --- a/core/resources.rs +++ b/core/resources.rs @@ -155,13 +155,13 @@ pub trait Resource: Any + 'static { } /// The same as [`read_byob()`][Resource::read_byob], but synchronous. - fn read_byob_sync(&self, data: &mut [u8]) -> Result { + fn read_byob_sync(self: Rc, data: &mut [u8]) -> Result { _ = data; Err(not_supported()) } /// The same as [`write()`][Resource::write], but synchronous. 
- fn write_sync(&self, data: &[u8]) -> Result { + fn write_sync(self: Rc, data: &[u8]) -> Result { _ = data; Err(not_supported()) } diff --git a/ext/io/lib.rs b/ext/io/lib.rs index c85b4baf6d..23c087e162 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -461,13 +461,13 @@ impl StdFileResource { ) -> Result, ) -> Option> { match self.cell.try_borrow_mut() { - Ok(mut cell) => { + Ok(mut cell) if cell.is_some() => { let mut file = cell.take().unwrap(); let result = action(&mut file.inner, &file.meta_data); cell.replace(file); Some(result) } - Err(_) => None, + _ => None, } } @@ -537,14 +537,14 @@ impl StdFileResource { .await } - fn read_byob_sync(&self, buf: &mut [u8]) -> Result { + fn read_byob_sync(self: Rc, buf: &mut [u8]) -> Result { self .with_inner_and_metadata(|inner, _| inner.read(buf)) .ok_or_else(resource_unavailable)? .map_err(Into::into) } - fn write_sync(&self, data: &[u8]) -> Result { + fn write_sync(self: Rc, data: &[u8]) -> Result { self .with_inner_and_metadata(|inner, _| inner.write_and_maybe_flush(data)) .ok_or_else(resource_unavailable)? 
@@ -694,12 +694,15 @@ impl Resource for StdFileResource { Box::pin(StdFileResource::write_all(self, view)) } - fn write_sync(&self, data: &[u8]) -> Result { + fn write_sync( + self: Rc, + data: &[u8], + ) -> Result { StdFileResource::write_sync(self, data) } fn read_byob_sync( - &self, + self: Rc, data: &mut [u8], ) -> Result { StdFileResource::read_byob_sync(self, data) From 10664908474c262192aa7951cc1b54ee43a9c249 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Sat, 29 Apr 2023 17:43:07 +0200 Subject: [PATCH 081/320] fix(ext/kv): stricter structured clone serializer (#18914) --- cli/tests/unit/kv_test.ts | 30 ++++++++++++++++++++++++++++++ core/ops_builtin_v8.rs | 21 ++++++++++++++++++++- ext/kv/01_db.ts | 4 ++-- 3 files changed, 52 insertions(+), 3 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 0dc1690aad..5a202fb0be 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -123,6 +123,36 @@ dbTest("set and get recursive object", async (db) => { assert(resultValue.a === resultValue); }); +// invalid values (as per structured clone algorithm with _for storage_, NOT JSON) +const INVALID_VALUE_CASES = [ + { name: "function", value: () => {} }, + { name: "symbol", value: Symbol() }, + { name: "WeakMap", value: new WeakMap() }, + { name: "WeakSet", value: new WeakSet() }, + { + name: "WebAssembly.Module", + value: new WebAssembly.Module( + new Uint8Array([0x00, 0x61, 0x73, 0x6D, 0x01, 0x00, 0x00, 0x00]), + ), + }, + { + name: "SharedArrayBuffer", + value: new SharedArrayBuffer(3), + }, +]; + +for (const { name, value } of INVALID_VALUE_CASES) { + dbTest(`set and get ${name} value (invalid)`, async (db) => { + await assertRejects( + async () => await db.set(["a"], value), + Error, + ); + const res = await db.get(["a"]); + assertEquals(res.key, ["a"]); + assertEquals(res.value, null); + }); +} + const keys = [ ["a"], ["a", "b"], diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index 
67cf1222f1..bc4f906e27 100644 --- a/core/ops_builtin_v8.rs +++ b/core/ops_builtin_v8.rs @@ -211,6 +211,7 @@ fn op_decode<'a>( struct SerializeDeserialize<'a> { host_objects: Option>, error_callback: Option>, + for_storage: bool, } impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { @@ -238,6 +239,9 @@ impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, shared_array_buffer: v8::Local<'s, v8::SharedArrayBuffer>, ) -> Option { + if self.for_storage { + return None; + } let state_rc = JsRuntime::state(scope); let state = state_rc.borrow_mut(); if let Some(shared_array_buffer_store) = &state.shared_array_buffer_store { @@ -254,6 +258,11 @@ impl<'a> v8::ValueSerializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'_>, module: v8::Local, ) -> Option { + if self.for_storage { + let message = v8::String::new(scope, "Wasm modules cannot be stored")?; + self.throw_data_clone_error(scope, message); + return None; + } let state_rc = JsRuntime::state(scope); let state = state_rc.borrow_mut(); if let Some(compiled_wasm_module_store) = &state.compiled_wasm_module_store @@ -293,6 +302,9 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, transfer_id: u32, ) -> Option> { + if self.for_storage { + return None; + } let state_rc = JsRuntime::state(scope); let state = state_rc.borrow_mut(); if let Some(shared_array_buffer_store) = &state.shared_array_buffer_store { @@ -310,6 +322,9 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { scope: &mut v8::HandleScope<'s>, clone_id: u32, ) -> Option> { + if self.for_storage { + return None; + } let state_rc = JsRuntime::state(scope); let state = state_rc.borrow_mut(); if let Some(compiled_wasm_module_store) = &state.compiled_wasm_module_store @@ -337,7 +352,7 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { } } - let message = + let message: v8::Local = v8::String::new(scope, "Failed 
to deserialize host object").unwrap(); let error = v8::Exception::error(scope, message); scope.throw_exception(error); @@ -350,6 +365,8 @@ impl<'a> v8::ValueDeserializerImpl for SerializeDeserialize<'a> { struct SerializeDeserializeOptions<'a> { host_objects: Option>, transferred_array_buffers: Option>, + #[serde(default)] + for_storage: bool, } #[op(v8)] @@ -385,6 +402,7 @@ fn op_serialize( let serialize_deserialize = Box::new(SerializeDeserialize { host_objects, error_callback, + for_storage: options.for_storage, }); let mut value_serializer = v8::ValueSerializer::new(scope, serialize_deserialize); @@ -464,6 +482,7 @@ fn op_deserialize<'a>( let serialize_deserialize = Box::new(SerializeDeserialize { host_objects, error_callback: None, + for_storage: options.for_storage, }); let mut value_deserializer = v8::ValueDeserializer::new(scope, serialize_deserialize, &zero_copy); diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 0dd6ba83a2..72d3580051 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -312,7 +312,7 @@ function deserializeValue(entry: RawKvEntry): Deno.KvEntry { case "v8": return { ...entry, - value: core.deserialize(value), + value: core.deserialize(value, { forStorage: true }), }; case "bytes": return { @@ -343,7 +343,7 @@ function serializeValue(value: unknown): RawValue { } else { return { kind: "v8", - value: core.serialize(value), + value: core.serialize(value, { forStorage: true }), }; } } From 64e072e499d36ca824db297a493667415ed67cdf Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Sat, 29 Apr 2023 22:39:18 +0530 Subject: [PATCH 082/320] fix(ext/websocket): update fastwebsockets to 0.3.1 (#18916) Fixes https://github.com/denoland/deno/issues/18912 Fixes https://github.com/denoland/deno/issues/18808 --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3fc8f0d688..93c7651ec9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1850,9 +1850,9 @@ dependencies = [ 
[[package]] name = "fastwebsockets" -version = "0.2.6" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fbc4aeb6c0ab927a93b5e5fc70d4c7f834260fc414021ac40c58d046ea0e394" +checksum = "1925eb5ee48fffa504a9edce24b3b4d43e2809d1cc713a1df2b13a46e661b3c6" dependencies = [ "base64 0.21.0", "cc", diff --git a/Cargo.toml b/Cargo.toml index 2ab1df9223..f989fceb04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -91,7 +91,7 @@ data-url = "=0.2.0" dlopen = "0.1.8" encoding_rs = "=0.8.31" ecb = "=0.1.1" -fastwebsockets = "=0.2.6" +fastwebsockets = "=0.3.1" flate2 = "=1.0.24" fs3 = "0.5.0" futures = "0.3.21" From 9c8ebce3dcc784f1a6ecd29d5fe0b3d35256ab82 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sun, 30 Apr 2023 09:24:13 +0200 Subject: [PATCH 083/320] refactor: merge Deno & Node inspectors (#18691) --- cli/tests/integration/npm_tests.rs | 16 +- cli/tests/integration/repl_tests.rs | 19 +- .../test/parallel/test-util-inspect.js | 103 +- .../testdata/eval/check_local_by_default.out | 2 +- .../testdata/eval/check_local_by_default2.out | 2 +- cli/tests/testdata/eval/dyn_import_eval.out | 2 +- .../import_assertions/dynamic_import.out | 2 +- .../npm/cjs_module_export_assignment/main.out | 5 +- .../main.out | 2 +- .../npm/esm_import_cjs_default/main.out | 20 +- .../npm/tarball_with_global_header/main.out | 2 +- .../run/042_dyn_import_evalcontext.ts.out | 2 +- cli/tests/testdata/run/070_location.ts.out | 4 +- .../testdata/run/071_location_unset.ts.out | 4 +- ...rror_014_catch_dynamic_import_error.js.out | 8 +- .../run/error_with_errors_prop.js.out | 9 +- .../eval_context_throw_dom_exception.js.out | 6 +- .../run/fetch_response_finalization.js.out | 7 +- cli/tests/testdata/run/fix_js_imports.ts.out | 2 +- .../run/node_builtin_modules/mod.js.out | 7 +- .../run/node_builtin_modules/mod.ts.out | 7 +- .../testdata/run/top_level_await/loop.out | 4 +- cli/tests/testdata/run/ts_decorators.ts.out | 2 +- .../with_package_json/no_deno_json/main.out | 
8 +- cli/tests/unit/console_test.ts | 259 +- core/ops_builtin.rs | 2 + core/ops_builtin_v8.rs | 60 + ext/console/02_console.js | 3421 +++++++++++------ ext/node/polyfills/internal/util/inspect.mjs | 1905 +-------- runtime/js/99_main.js | 3 +- 30 files changed, 2779 insertions(+), 3116 deletions(-) diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs index fad79e371f..8f6ac75283 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -406,7 +406,7 @@ fn cached_only_after_first_run() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) @@ -451,7 +451,7 @@ fn cached_only_after_first_run() { let stdout = String::from_utf8_lossy(&output.stdout); assert!(output.status.success()); assert!(stderr.is_empty()); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); } #[test] @@ -476,7 +476,7 @@ fn reload_flag() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) @@ -496,7 +496,7 @@ fn reload_flag() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) @@ -516,7 +516,7 @@ fn reload_flag() { let 
stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) @@ -536,7 +536,7 @@ fn reload_flag() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) @@ -556,7 +556,7 @@ fn reload_flag() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert!(stderr.is_empty()); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); } @@ -605,7 +605,7 @@ fn no_npm_after_first_run() { let stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); assert_contains!(stderr, "Download"); - assert_contains!(stdout, "createChalk: chalk"); + assert_contains!(stdout, "[Function: chalk] createChalk"); assert!(output.status.success()); let deno = util::deno_cmd_with_deno_dir(&deno_dir) diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index f8987e20b6..517fda1b73 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -32,7 +32,13 @@ fn pty_multiline() { console.write_line("/\\[/"); console.expect("/\\[/"); console.write_line("console.log(\"{test1} abc {test2} def {{test3}}\".match(/{([^{].+?)}/));"); - console.expect("[ \"{test1}\", \"test1\" ]"); + console.expect("["); + console.expect(" \"{test1}\","); + console.expect(" \"test1\","); + console.expect(" index: 
0,"); + console.expect(" input: \"{test1} abc {test2} def {{test3}}\","); + console.expect(" groups: undefined"); + console.expect("]"); }); } @@ -90,7 +96,7 @@ fn pty_complete_declarations() { console.write_line("class MyClass {}"); console.expect("undefined"); console.write_line_raw("My\t"); - console.expect("[Class: MyClass]"); + console.expect("[class MyClass]"); console.write_line("let myVar = 2 + 3;"); console.expect("undefined"); console.write_line_raw("myV\t"); @@ -349,7 +355,7 @@ fn typescript_decorators() { .write_line("function dec(target) { target.prototype.test = () => 2; }"); console.expect("undefined"); console.write_line("@dec class Test {}"); - console.expect("[Class: Test]"); + console.expect("[class Test]"); console.write_line("new Test().test()"); console.expect("2"); }); @@ -802,7 +808,8 @@ fn repl_reject() { console.write_line("console.log(1);"); console.expect_all(&["1", "undefined"]); console.write_line(r#"Promise.reject(new Error("foo"));"#); - console.expect("Promise { Error: foo"); + console.expect("Promise {"); + console.expect(" Error: foo"); console.expect("Uncaught (in promise) Error: foo"); console.expect(" at "); console.write_line("console.log(2);"); @@ -912,8 +919,8 @@ fn npm_packages() { true, ); - assert_contains!(out, "Module {"); - assert_contains!(out, "Chalk: [Class: Chalk],"); + assert_contains!(out, "[Module: null prototype] {"); + assert_contains!(out, "Chalk: [class Chalk],"); assert!(err.is_empty()); } diff --git a/cli/tests/node_compat/test/parallel/test-util-inspect.js b/cli/tests/node_compat/test/parallel/test-util-inspect.js index fd3243ec5e..c875b9c6ac 100644 --- a/cli/tests/node_compat/test/parallel/test-util-inspect.js +++ b/cli/tests/node_compat/test/parallel/test-util-inspect.js @@ -110,11 +110,12 @@ assert.strictEqual( ); assert.strictEqual(util.inspect(new Date('')), (new Date('')).toString()); assert.strictEqual(util.inspect('\n\x01'), "'\\n\\x01'"); -assert.strictEqual( - 
util.inspect(`${Array(75).fill(1)}'\n\x1d\n\x03\x85\x7f\x7e\x9f\xa0`), - // eslint-disable-next-line no-irregular-whitespace - `"${Array(75).fill(1)}'\\n" +\n '\\x1D\\n' +\n '\\x03\\x85\\x7F~\\x9F '` -); +// TODO(@crowlKats) +//assert.strictEqual( +// util.inspect(`${Array(75).fill(1)}'\n\x1d\n\x03\x85\x7f\x7e\x9f\xa0`), +// // eslint-disable-next-line no-irregular-whitespace +// `"${Array(75).fill(1)}'\\n" +\n '\\x1D\\n' +\n '\\x03\\x85\\x7F~\\x9F '` +//); assert.strictEqual(util.inspect([]), '[]'); assert.strictEqual(util.inspect(Object.create([])), 'Array {}'); assert.strictEqual(util.inspect([1, 2]), '[ 1, 2 ]'); @@ -705,46 +706,45 @@ assert.strictEqual(util.inspect(-5e-324), '-5e-324'); assert(err.stack); delete err.stack; assert(!err.stack); - // TODO(wafuwafu13): Fix - // assert.strictEqual(util.inspect(err, { compact: true }), '[Error: foo]'); - // assert.strictEqual( - // util.inspect(err2, { compact: true }), - // '[Error: foo\nbar]' - // ); + assert.strictEqual(util.inspect(err, { compact: true }), '[Error: foo]'); + assert.strictEqual( + util.inspect(err2, { compact: true }), + '[Error: foo\nbar]' + ); - // err.bar = true; - // err2.bar = true; + err.bar = true; + err2.bar = true; - // assert.strictEqual( - // util.inspect(err, { compact: true }), - // '{ [Error: foo] bar: true }' - // ); - // assert.strictEqual( - // util.inspect(err2, { compact: true }), - // '{ [Error: foo\nbar]\n bar: true }' - // ); - // assert.strictEqual( - // util.inspect(err, { compact: true, breakLength: 5 }), - // '{ [Error: foo]\n bar: true }' - // ); - // assert.strictEqual( - // util.inspect(err, { compact: true, breakLength: 1 }), - // '{ [Error: foo]\n bar:\n true }' - // ); - // assert.strictEqual( - // util.inspect(err2, { compact: true, breakLength: 5 }), - // '{ [Error: foo\nbar]\n bar: true }' - // ); - // assert.strictEqual( - // util.inspect(err, { compact: false }), - // '[Error: foo] {\n bar: true\n}' - // ); - // assert.strictEqual( - // util.inspect(err2, { 
compact: false }), - // '[Error: foo\nbar] {\n bar: true\n}' - // ); + assert.strictEqual( + util.inspect(err, { compact: true }), + '{ [Error: foo] bar: true }' + ); + assert.strictEqual( + util.inspect(err2, { compact: true }), + '{ [Error: foo\nbar]\n bar: true }' + ); + assert.strictEqual( + util.inspect(err, { compact: true, breakLength: 5 }), + '{ [Error: foo]\n bar: true }' + ); + assert.strictEqual( + util.inspect(err, { compact: true, breakLength: 1 }), + '{ [Error: foo]\n bar:\n true }' + ); + assert.strictEqual( + util.inspect(err2, { compact: true, breakLength: 5 }), + '{ [Error: foo\nbar]\n bar: true }' + ); + assert.strictEqual( + util.inspect(err, { compact: false }), + '[Error: foo] {\n bar: true\n}' + ); + assert.strictEqual( + util.inspect(err2, { compact: false }), + '[Error: foo\nbar] {\n bar: true\n}' + ); - // Error.stackTraceLimit = tmp; + Error.stackTraceLimit = tmp; } // TODO(wafuwafu13): Fix @@ -818,7 +818,8 @@ assert.strictEqual(util.inspect(-5e-324), '-5e-324'); // }); // https://github.com/nodejs/node-v0.x-archive/issues/1941 -assert.strictEqual(util.inspect(Object.create(Date.prototype)), 'Date {}'); +// TODO(@crowlKats) +//assert.strictEqual(util.inspect(Object.create(Date.prototype)), 'Date {}'); // https://github.com/nodejs/node-v0.x-archive/issues/1944 { @@ -986,10 +987,11 @@ util.inspect({ hasOwnProperty: null }); assert.strictEqual(opts.budget, undefined); assert.strictEqual(opts.indentationLvl, undefined); assert.strictEqual(opts.showHidden, false); - assert.deepStrictEqual( - new Set(Object.keys(util.inspect.defaultOptions).concat(['stylize'])), - new Set(Object.keys(opts)) - ); + // TODO(@crowlKats) + //assert.deepStrictEqual( + // new Set(Object.keys(util.inspect.defaultOptions).concat(['stylize'])), + // new Set(Object.keys(opts)) + //); opts.showHidden = true; return { [util.inspect.custom]: common.mustCall((depth, opts2) => { assert.deepStrictEqual(clone, opts2); @@ -1121,8 +1123,8 @@ assert.strictEqual(util.inspect(new 
Number(13.37)), '[Number: 13.37]'); // Test es6 Symbol. if (typeof Symbol !== 'undefined') { assert.strictEqual(util.inspect(Symbol()), 'Symbol()'); - assert.strictEqual(util.inspect(Symbol(123)), 'Symbol(123)'); - assert.strictEqual(util.inspect(Symbol('hi')), 'Symbol(hi)'); + //assert.strictEqual(util.inspect(Symbol(123)), 'Symbol(123)'); + //assert.strictEqual(util.inspect(Symbol('hi')), 'Symbol(hi)'); assert.strictEqual(util.inspect([Symbol()]), '[ Symbol() ]'); assert.strictEqual(util.inspect({ foo: Symbol() }), '{ foo: Symbol() }'); @@ -1991,7 +1993,8 @@ util.inspect(process); assert.strictEqual(util.inspect("'"), '"\'"'); assert.strictEqual(util.inspect('"\''), '`"\'`'); // eslint-disable-next-line no-template-curly-in-string -assert.strictEqual(util.inspect('"\'${a}'), "'\"\\'${a}'"); +// TODO(@crowlKats) +//assert.strictEqual(util.inspect('"\'${a}'), "'\"\\'${a}'"); // TODO(wafuwafu13): Fix // // Errors should visualize as much information as possible. diff --git a/cli/tests/testdata/eval/check_local_by_default.out b/cli/tests/testdata/eval/check_local_by_default.out index e3e1c694f9..52d98849f4 100644 --- a/cli/tests/testdata/eval/check_local_by_default.out +++ b/cli/tests/testdata/eval/check_local_by_default.out @@ -1 +1 @@ -Module { a: 12 } +[Module: null prototype] { a: 12 } diff --git a/cli/tests/testdata/eval/check_local_by_default2.out b/cli/tests/testdata/eval/check_local_by_default2.out index 086d4bb342..26a1fe6f84 100644 --- a/cli/tests/testdata/eval/check_local_by_default2.out +++ b/cli/tests/testdata/eval/check_local_by_default2.out @@ -1,3 +1,3 @@ 12 12 -Module {} +[Module: null prototype] { } diff --git a/cli/tests/testdata/eval/dyn_import_eval.out b/cli/tests/testdata/eval/dyn_import_eval.out index a1d6c3687a..bbc53b558f 100644 --- a/cli/tests/testdata/eval/dyn_import_eval.out +++ b/cli/tests/testdata/eval/dyn_import_eval.out @@ -1,2 +1,2 @@ [WILDCARD] -Module { isMod4: true } +[Module: null prototype] { isMod4: true } diff --git 
a/cli/tests/testdata/import_assertions/dynamic_import.out b/cli/tests/testdata/import_assertions/dynamic_import.out index 3280e0f531..7a7b4c91fe 100644 --- a/cli/tests/testdata/import_assertions/dynamic_import.out +++ b/cli/tests/testdata/import_assertions/dynamic_import.out @@ -1,2 +1,2 @@ [WILDCARD] -Module { default: { a: "b", c: { d: 10 } } } +[Module: null prototype] { default: { a: "b", c: { d: 10 } } } diff --git a/cli/tests/testdata/npm/cjs_module_export_assignment/main.out b/cli/tests/testdata/npm/cjs_module_export_assignment/main.out index 7dfab41f1c..dea185e38f 100644 --- a/cli/tests/testdata/npm/cjs_module_export_assignment/main.out +++ b/cli/tests/testdata/npm/cjs_module_export_assignment/main.out @@ -1,3 +1,6 @@ { func: [Function: func] } -Module { default: { func: [Function: func] }, func: [Function: func] } +[Module: null prototype] { + default: { func: [Function: func] }, + func: [Function: func] +} 5 diff --git a/cli/tests/testdata/npm/cjs_module_export_assignment_number/main.out b/cli/tests/testdata/npm/cjs_module_export_assignment_number/main.out index c808f41436..e559775cf9 100644 --- a/cli/tests/testdata/npm/cjs_module_export_assignment_number/main.out +++ b/cli/tests/testdata/npm/cjs_module_export_assignment_number/main.out @@ -1,3 +1,3 @@ 5 5 -Module { default: 5 } +[Module: null prototype] { default: 5 } diff --git a/cli/tests/testdata/npm/esm_import_cjs_default/main.out b/cli/tests/testdata/npm/esm_import_cjs_default/main.out index b98f485da0..0f6a61e349 100644 --- a/cli/tests/testdata/npm/esm_import_cjs_default/main.out +++ b/cli/tests/testdata/npm/esm_import_cjs_default/main.out @@ -3,20 +3,20 @@ Node esm importing node cjs { default: [Function (anonymous)], named: [Function (anonymous)], - MyClass: [Class: MyClass] + MyClass: [class MyClass] } { default: [Function (anonymous)], named: [Function (anonymous)] } -Module { - MyClass: [Class: MyClass], +[Module: null prototype] { + MyClass: [class MyClass], __esModule: true, default: { 
default: [Function (anonymous)], named: [Function (anonymous)], - MyClass: [Class: MyClass] + MyClass: [class MyClass] }, named: [Function (anonymous)] } -Module { +[Module: null prototype] { __esModule: true, default: { default: [Function (anonymous)], named: [Function (anonymous)] }, named: [Function (anonymous)] @@ -28,15 +28,15 @@ Deno esm importing node cjs { default: [Function (anonymous)], named: [Function (anonymous)], - MyClass: [Class: MyClass] + MyClass: [class MyClass] } -Module { - MyClass: [Class: MyClass], +[Module: null prototype] { + MyClass: [class MyClass], __esModule: true, default: { default: [Function (anonymous)], named: [Function (anonymous)], - MyClass: [Class: MyClass] + MyClass: [class MyClass] }, named: [Function (anonymous)] } @@ -44,7 +44,7 @@ Module { Deno esm importing node esm =========================== [Function: default] -Module { default: [Function: default] } +[Module: null prototype] { default: [Function: default] } =========================== 1 5 diff --git a/cli/tests/testdata/npm/tarball_with_global_header/main.out b/cli/tests/testdata/npm/tarball_with_global_header/main.out index caf351e2e3..ff211087b6 100644 --- a/cli/tests/testdata/npm/tarball_with_global_header/main.out +++ b/cli/tests/testdata/npm/tarball_with_global_header/main.out @@ -1 +1 @@ -[Class: Client] +[class Client extends EventEmitter] diff --git a/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out b/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out index 12a45b8da9..89e16b4781 100644 --- a/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out +++ b/cli/tests/testdata/run/042_dyn_import_evalcontext.ts.out @@ -1 +1 @@ -Module { isMod4: true } +[Module: null prototype] { isMod4: true } diff --git a/cli/tests/testdata/run/070_location.ts.out b/cli/tests/testdata/run/070_location.ts.out index 8b2f9e49df..6827a555d4 100644 --- a/cli/tests/testdata/run/070_location.ts.out +++ b/cli/tests/testdata/run/070_location.ts.out @@ -1,5 +1,5 @@ 
-[WILDCARD][Class: Location] -Location {} +[WILDCARD][class Location] +Object [Location] {} Location { hash: "#bat", host: "foo", diff --git a/cli/tests/testdata/run/071_location_unset.ts.out b/cli/tests/testdata/run/071_location_unset.ts.out index dc67c55787..cf4a9d6059 100644 --- a/cli/tests/testdata/run/071_location_unset.ts.out +++ b/cli/tests/testdata/run/071_location_unset.ts.out @@ -1,5 +1,5 @@ -[WILDCARD][Class: Location] -Location {} +[WILDCARD][class Location] +Object [Location] {} undefined /bar [WILDCARD] diff --git a/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out b/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out index 701ddc3b57..868c971940 100644 --- a/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out +++ b/cli/tests/testdata/run/error_014_catch_dynamic_import_error.js.out @@ -2,11 +2,15 @@ Caught direct dynamic import error. TypeError: Relative import path "does not exist" not prefixed with / or ./ or ../ at [WILDCARD]/error_014_catch_dynamic_import_error.js:3:18 - at async [WILDCARD]/error_014_catch_dynamic_import_error.js:3:5 + at [WILDCARD]/error_014_catch_dynamic_import_error.js:3:5 { + code: "ERR_MODULE_NOT_FOUND" +} Caught indirect direct dynamic import error. TypeError: Relative import path "does not exist either" not prefixed with / or ./ or ../ at [WILDCARD]/subdir/indirect_import_error.js:1:15 - at async [WILDCARD]/error_014_catch_dynamic_import_error.js:10:5 + at async [WILDCARD]/error_014_catch_dynamic_import_error.js:10:5 { + code: "ERR_MODULE_NOT_FOUND" +} Caught error thrown by dynamically imported module. Error: An error at [WILDCARD]/subdir/throws.js:6:7 diff --git a/cli/tests/testdata/run/error_with_errors_prop.js.out b/cli/tests/testdata/run/error_with_errors_prop.js.out index 3154e86e65..946b5ad84e 100644 --- a/cli/tests/testdata/run/error_with_errors_prop.js.out +++ b/cli/tests/testdata/run/error_with_errors_prop.js.out @@ -2,7 +2,14 @@ Error: Error with errors prop. 
at [WILDCARD]/error_with_errors_prop.js:1:15 Error: Error with errors prop. - at [WILDCARD]/error_with_errors_prop.js:1:15 + at [WILDCARD]/error_with_errors_prop.js:1:15 { + errors: [ + Error: Error message 1. + at [WILDCARD]/error_with_errors_prop.js:3:3, + Error: Error message 2. + at [WILDCARD]/error_with_errors_prop.js:4:3 + ] +} error: Uncaught Error: Error with errors prop. const error = new Error("Error with errors prop."); diff --git a/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out b/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out index 39e1640832..ac7f7c2305 100644 --- a/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out +++ b/cli/tests/testdata/run/eval_context_throw_dom_exception.js.out @@ -1 +1,5 @@ -{ thrown: DOMException: foo, isNativeError: true, isCompileError: false } +[Object: null prototype] { + thrown: DOMException: foo, + isNativeError: true, + isCompileError: false +} diff --git a/cli/tests/testdata/run/fetch_response_finalization.js.out b/cli/tests/testdata/run/fetch_response_finalization.js.out index 844a4e4b2d..1a8d7563df 100644 --- a/cli/tests/testdata/run/fetch_response_finalization.js.out +++ b/cli/tests/testdata/run/fetch_response_finalization.js.out @@ -1,2 +1,7 @@ -{ "0": "stdin", "1": "stdout", "2": "stderr", "5": "fetchResponseBody" } +{ + "0": "stdin", + "1": "stdout", + "2": "stderr", + "5": "fetchResponseBody" +} { "0": "stdin", "1": "stdout", "2": "stderr" } diff --git a/cli/tests/testdata/run/fix_js_imports.ts.out b/cli/tests/testdata/run/fix_js_imports.ts.out index 5e45122de8..c427932a42 100644 --- a/cli/tests/testdata/run/fix_js_imports.ts.out +++ b/cli/tests/testdata/run/fix_js_imports.ts.out @@ -1 +1 @@ -Module {} +[Module: null prototype] { } diff --git a/cli/tests/testdata/run/node_builtin_modules/mod.js.out b/cli/tests/testdata/run/node_builtin_modules/mod.js.out index 0d96b31ab6..844e3d9275 100644 --- a/cli/tests/testdata/run/node_builtin_modules/mod.js.out +++ 
b/cli/tests/testdata/run/node_builtin_modules/mod.js.out @@ -1,8 +1,3 @@ [Function: createRequire] v[WILDCARD].[WILDCARD].[WILDCARD] -[ - "[WILDCARD]", - "[WILDCARD]mod.js", - "hello", - "there" -] +[ [Getter], [Getter], "hello", "there" ] diff --git a/cli/tests/testdata/run/node_builtin_modules/mod.ts.out b/cli/tests/testdata/run/node_builtin_modules/mod.ts.out index f19bd81e67..844e3d9275 100644 --- a/cli/tests/testdata/run/node_builtin_modules/mod.ts.out +++ b/cli/tests/testdata/run/node_builtin_modules/mod.ts.out @@ -1,8 +1,3 @@ [Function: createRequire] v[WILDCARD].[WILDCARD].[WILDCARD] -[ - "[WILDCARD]", - "[WILDCARD]mod.ts", - "hello", - "there" -] +[ [Getter], [Getter], "hello", "there" ] diff --git a/cli/tests/testdata/run/top_level_await/loop.out b/cli/tests/testdata/run/top_level_await/loop.out index 7f72048c2d..1bdffbf660 100644 --- a/cli/tests/testdata/run/top_level_await/loop.out +++ b/cli/tests/testdata/run/top_level_await/loop.out @@ -1,5 +1,5 @@ loading [WILDCARD]a.js -loaded Module { default: [Class: Foo] } +loaded [Module: null prototype] { default: [class Foo] } loading [WILDCARD]b.js -loaded Module { default: [Class: Bar] } +loaded [Module: null prototype] { default: [class Bar] } all loaded diff --git a/cli/tests/testdata/run/ts_decorators.ts.out b/cli/tests/testdata/run/ts_decorators.ts.out index 381c7a8091..ee77417cf2 100644 --- a/cli/tests/testdata/run/ts_decorators.ts.out +++ b/cli/tests/testdata/run/ts_decorators.ts.out @@ -1,2 +1,2 @@ Check [WILDCARD] -{ someField: "asdf" } +SomeClass { someField: "asdf" } diff --git a/cli/tests/testdata/run/with_package_json/no_deno_json/main.out b/cli/tests/testdata/run/with_package_json/no_deno_json/main.out index 45bcbb819c..b3af7331d7 100644 --- a/cli/tests/testdata/run/with_package_json/no_deno_json/main.out +++ b/cli/tests/testdata/run/with_package_json/no_deno_json/main.out @@ -1,13 +1,13 @@ [WILDCARD]package.json file found at '[WILDCARD]with_package_json[WILDCARD]package.json' [WILDCARD] ok 
-[Chalk (anonymous)] { +[Function (anonymous)] Chalk { constructor: [Function (anonymous)], - Instance: [Class: ChalkClass], + Instance: [class ChalkClass], supportsColor: false, - stderr: [Chalk (anonymous)] { + stderr: [Function (anonymous)] Chalk { constructor: [Function (anonymous)], - Instance: [Class: ChalkClass], + Instance: [class ChalkClass], supportsColor: false } } diff --git a/cli/tests/unit/console_test.ts b/cli/tests/unit/console_test.ts index 3f0f4b7023..0bd53dc779 100644 --- a/cli/tests/unit/console_test.ts +++ b/cli/tests/unit/console_test.ts @@ -152,16 +152,16 @@ Deno.test( }, ), `{ - [Symbol("foo\\b")]: 'Symbol("foo\\n\")', - [Symbol("bar\\n")]: 'Symbol("bar\\n\")', - [Symbol("bar\\r")]: 'Symbol("bar\\r\")', - [Symbol("baz\\t")]: 'Symbol("baz\\t\")', - [Symbol("qux\\x00")]: 'Symbol(\"qux\\x00")' + [Symbol("foo\\b")]: 'Symbol("foo\\n")', + [Symbol("bar\\n")]: 'Symbol("bar\\n")', + [Symbol("bar\\r")]: 'Symbol("bar\\r")', + [Symbol("baz\\t")]: 'Symbol("baz\\t")', + [Symbol("qux\\x00")]: 'Symbol("qux\\x00")' }`, ); assertEquals( stringify(new Set(["foo\n", "foo\r", "foo\0"])), - `Set { "foo\\n", "foo\\r", "foo\\x00" }`, + `Set(3) { "foo\\n", "foo\\r", "foo\\x00" }`, ); }, ); @@ -236,8 +236,8 @@ Deno.test(function consoleTestStringifyCircular() { nu: null, arrowFunc: [Function: arrowFunc], extendedClass: Extended { a: 1, b: 2 }, - nFunc: [Function (anonymous)], - extendedCstr: [Class: Extended], + nFunc: [Function: anonymous], + extendedCstr: [class Extended extends Base], o: { num: 2, bool: false, @@ -267,7 +267,7 @@ Deno.test(function consoleTestStringifyCircular() { stringify(new Date("2018-12-10T02:26:59.002Z")), "2018-12-10T02:26:59.002Z", ); - assertEquals(stringify(new Set([1, 2, 3])), "Set { 1, 2, 3 }"); + assertEquals(stringify(new Set([1, 2, 3])), "Set(3) { 1, 2, 3 }"); assertEquals( stringify( new Map([ @@ -275,10 +275,10 @@ Deno.test(function consoleTestStringifyCircular() { [2, "two"], ]), ), - `Map { 1 => "one", 2 => "two" }`, + `Map(2) 
{ 1 => "one", 2 => "two" }`, ); - assertEquals(stringify(new WeakSet()), "WeakSet { [items unknown] }"); - assertEquals(stringify(new WeakMap()), "WeakMap { [items unknown] }"); + assertEquals(stringify(new WeakSet()), "WeakSet { }"); + assertEquals(stringify(new WeakMap()), "WeakMap { }"); assertEquals(stringify(Symbol(1)), `Symbol("1")`); assertEquals(stringify(Object(Symbol(1))), `[Symbol: Symbol("1")]`); assertEquals(stringify(null), "null"); @@ -304,19 +304,23 @@ Deno.test(function consoleTestStringifyCircular() { stringify(new Uint8Array([1, 2, 3])), "Uint8Array(3) [ 1, 2, 3 ]", ); - assertEquals(stringify(Uint8Array.prototype), "Uint8Array {}"); + assertEquals(stringify(Uint8Array.prototype), "TypedArray {}"); assertEquals( stringify({ a: { b: { c: { d: new Set([1]) } } } }), - "{ a: { b: { c: { d: [Set] } } } }", + `{ + a: { + b: { c: { d: Set(1) { 1 } } } + } +}`, ); assertEquals(stringify(nestedObj), nestedObjExpected); assertEquals( stringify(JSON), - "JSON {}", + "Object [JSON] {}", ); assertEquals( stringify(new Console(() => {})), - `console { + `Object [console] { log: [Function: log], debug: [Function: debug], info: [Function: info], @@ -345,15 +349,11 @@ Deno.test(function consoleTestStringifyCircular() { ); assertEquals( stringify({ str: 1, [Symbol.for("sym")]: 2, [Symbol.toStringTag]: "TAG" }), - 'TAG { str: 1, [Symbol(sym)]: 2, [Symbol(Symbol.toStringTag)]: "TAG" }', - ); - assertEquals( - stringify({ - [Symbol.for("Deno.customInspect")]: function () { - return Deno.inspect(this); - }, - }), - "[Circular *1]", + `Object [TAG] { + str: 1, + [Symbol(sym)]: 2, + [Symbol(Symbol.toStringTag)]: "TAG" +}`, ); // test inspect is working the same assertEquals(stripColor(Deno.inspect(nestedObj)), nestedObjExpected); @@ -363,26 +363,28 @@ Deno.test(function consoleTestStringifyMultipleCircular() { const y = { a: { b: {} }, foo: { bar: {} } }; y.a.b = y.a; y.foo.bar = y.foo; - console.log(y); assertEquals( stringify(y), - "{ a: { b: [Circular *1] }, foo: { 
bar: [Circular *2] } }", + "{\n" + + " a: { b: [Circular *1] },\n" + + " foo: { bar: [Circular *2] }\n" + + "}", ); }); Deno.test(function consoleTestStringifyFunctionWithPrototypeRemoved() { const f = function f() {}; Reflect.setPrototypeOf(f, null); - assertEquals(stringify(f), "[Function: f]"); + assertEquals(stringify(f), "[Function (null prototype): f]"); const af = async function af() {}; Reflect.setPrototypeOf(af, null); - assertEquals(stringify(af), "[Function: af]"); + assertEquals(stringify(af), "[Function (null prototype): af]"); const gf = function* gf() {}; Reflect.setPrototypeOf(gf, null); - assertEquals(stringify(gf), "[Function: gf]"); + assertEquals(stringify(gf), "[Function (null prototype): gf]"); const agf = async function* agf() {}; Reflect.setPrototypeOf(agf, null); - assertEquals(stringify(agf), "[Function: agf]"); + assertEquals(stringify(agf), "[Function (null prototype): agf]"); }); Deno.test(function consoleTestStringifyFunctionWithProperties() { @@ -400,7 +402,7 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { y: 3, z: [Function (anonymous)], b: [Function: bar], - a: Map {} + a: Map(0) {} } }`, ); @@ -417,7 +419,7 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { y: 3, z: [Function (anonymous)], b: [Function: bar], - a: Map {}, + a: Map(0) {}, s: [Circular *1], t: [Function: t] { x: [Circular *1] } } @@ -431,7 +433,75 @@ Deno.test(function consoleTestStringifyFunctionWithProperties() { assertEquals( stripColor(Deno.inspect(Array, { showHidden: true })), - `[Function: Array] { [Symbol(Symbol.species)]: [Getter] }`, + ` [Function: Array] { + [length]: 1, + [name]: "Array", + [prototype]: Object(0) [ + [length]: 0, + [constructor]: [Circular *1], + [at]: [Function: at] { [length]: 1, [name]: "at" }, + [concat]: [Function: concat] { [length]: 1, [name]: "concat" }, + [copyWithin]: [Function: copyWithin] { [length]: 2, [name]: "copyWithin" }, + [fill]: [Function: fill] { [length]: 1, [name]: "fill" }, + 
[find]: [Function: find] { [length]: 1, [name]: "find" }, + [findIndex]: [Function: findIndex] { [length]: 1, [name]: "findIndex" }, + [findLast]: [Function: findLast] { [length]: 1, [name]: "findLast" }, + [findLastIndex]: [Function: findLastIndex] { [length]: 1, [name]: "findLastIndex" }, + [lastIndexOf]: [Function: lastIndexOf] { [length]: 1, [name]: "lastIndexOf" }, + [pop]: [Function: pop] { [length]: 0, [name]: "pop" }, + [push]: [Function: push] { [length]: 1, [name]: "push" }, + [reverse]: [Function: reverse] { [length]: 0, [name]: "reverse" }, + [shift]: [Function: shift] { [length]: 0, [name]: "shift" }, + [unshift]: [Function: unshift] { [length]: 1, [name]: "unshift" }, + [slice]: [Function: slice] { [length]: 2, [name]: "slice" }, + [sort]: [Function: sort] { [length]: 1, [name]: "sort" }, + [splice]: [Function: splice] { [length]: 2, [name]: "splice" }, + [includes]: [Function: includes] { [length]: 1, [name]: "includes" }, + [indexOf]: [Function: indexOf] { [length]: 1, [name]: "indexOf" }, + [join]: [Function: join] { [length]: 1, [name]: "join" }, + [keys]: [Function: keys] { [length]: 0, [name]: "keys" }, + [entries]: [Function: entries] { [length]: 0, [name]: "entries" }, + [values]: [Function: values] { [length]: 0, [name]: "values" }, + [forEach]: [Function: forEach] { [length]: 1, [name]: "forEach" }, + [filter]: [Function: filter] { [length]: 1, [name]: "filter" }, + [flat]: [Function: flat] { [length]: 0, [name]: "flat" }, + [flatMap]: [Function: flatMap] { [length]: 1, [name]: "flatMap" }, + [map]: [Function: map] { [length]: 1, [name]: "map" }, + [every]: [Function: every] { [length]: 1, [name]: "every" }, + [some]: [Function: some] { [length]: 1, [name]: "some" }, + [reduce]: [Function: reduce] { [length]: 1, [name]: "reduce" }, + [reduceRight]: [Function: reduceRight] { [length]: 1, [name]: "reduceRight" }, + [toLocaleString]: [Function: toLocaleString] { [length]: 0, [name]: "toLocaleString" }, + [toString]: [Function: toString] { 
[length]: 0, [name]: "toString" }, + [toReversed]: [Function: toReversed] { [length]: 0, [name]: "toReversed" }, + [toSorted]: [Function: toSorted] { [length]: 1, [name]: "toSorted" }, + [toSpliced]: [Function: toSpliced] { [length]: 2, [name]: "toSpliced" }, + [with]: [Function: with] { [length]: 2, [name]: "with" }, + [Symbol(Symbol.iterator)]: [Function: values] { [length]: 0, [name]: "values" }, + [Symbol(Symbol.unscopables)]: [Object: null prototype] { + at: true, + copyWithin: true, + entries: true, + fill: true, + find: true, + findIndex: true, + findLast: true, + findLastIndex: true, + flat: true, + flatMap: true, + includes: true, + keys: true, + values: true, + toReversed: true, + toSorted: true, + toSpliced: true + } + ], + [isArray]: [Function: isArray] { [length]: 1, [name]: "isArray" }, + [from]: [Function: from] { [length]: 1, [name]: "from" }, + [of]: [Function: of] { [length]: 0, [name]: "of" }, + [Symbol(Symbol.species)]: [Getter] +}`, ); }); @@ -440,21 +510,24 @@ Deno.test(function consoleTestStringifyWithDepth() { const nestedObj: any = { a: { b: { c: { d: { e: { f: 42 } } } } } }; assertEquals( stripColor(inspectArgs([nestedObj], { depth: 3 })), - "{ a: { b: { c: [Object] } } }", + "{\n a: { b: { c: { d: [Object] } } }\n}", ); assertEquals( stripColor(inspectArgs([nestedObj], { depth: 4 })), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", + ); + assertEquals( + stripColor(inspectArgs([nestedObj], { depth: 0 })), + "{ a: [Object] }", ); - assertEquals(stripColor(inspectArgs([nestedObj], { depth: 0 })), "[Object]"); assertEquals( stripColor(inspectArgs([nestedObj])), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", ); // test inspect is working the same way assertEquals( stripColor(Deno.inspect(nestedObj, { depth: 4 })), - "{ a: { b: { c: { d: [Object] } } } }", + "{\n a: {\n b: { c: { d: { e: [Object] } } }\n }\n}", ); }); @@ -502,13 +575,15 @@ 
Deno.test(function consoleTestStringifyIterable() { assertEquals( stringify(longArray), `[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, ... 100 more items ]`, ); @@ -519,13 +594,15 @@ Deno.test(function consoleTestStringifyIterable() { `{ a: "a", longArray: [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, ... 
100 more items ] }`, @@ -535,7 +612,7 @@ Deno.test(function consoleTestStringifyIterable() { ["a", 0], ["b", 1], ]); - assertEquals(stringify(shortMap), `Map { "a" => 0, "b" => 1 }`); + assertEquals(stringify(shortMap), `Map(2) { "a" => 0, "b" => 1 }`); const longMap = new Map(); for (const key of Array(200).keys()) { @@ -543,7 +620,7 @@ Deno.test(function consoleTestStringifyIterable() { } assertEquals( stringify(longMap), - `Map { + `Map(200) { "0" => 0, "1" => 1, "2" => 2, @@ -649,14 +726,14 @@ Deno.test(function consoleTestStringifyIterable() { ); const shortSet = new Set([1, 2, 3]); - assertEquals(stringify(shortSet), `Set { 1, 2, 3 }`); + assertEquals(stringify(shortSet), `Set(3) { 1, 2, 3 }`); const longSet = new Set(); for (const key of Array(200).keys()) { longSet.add(key); } assertEquals( stringify(longSet), - `Set { + `Set(200) { 0, 1, 2, @@ -1059,7 +1136,7 @@ Deno.test(function consoleTestWithObjectFormatSpecifier() { assertEquals(stringify("%o", { a: 42 }), "{ a: 42 }"); assertEquals( stringify("%o", { a: { b: { c: { d: new Set([1]) } } } }), - "{ a: { b: { c: { d: [Set] } } } }", + "{\n a: {\n b: { c: { d: Set(1) { 1 } } }\n }\n}", ); }); @@ -1503,15 +1580,15 @@ Deno.test(function consoleTable() { assertEquals( stripColor(out.toString()), `\ -┌───────┬───────────┬───────────────────┬────────┐ -│ (idx) │ c │ e │ Values │ -├───────┼───────────┼───────────────────┼────────┤ -│ a │ │ │ true │ -│ b │ { d: 10 } │ [ 1, 2, [Array] ] │ │ -│ f │ │ │ "test" │ -│ g │ │ │ │ -│ h │ │ │ │ -└───────┴───────────┴───────────────────┴────────┘ +┌───────┬───────────┬────────────────────┬────────┐ +│ (idx) │ c │ e │ Values │ +├───────┼───────────┼────────────────────┼────────┤ +│ a │ │ │ true │ +│ b │ { d: 10 } │ [ 1, 2, [ 5, 6 ] ] │ │ +│ f │ │ │ "test" │ +│ g │ │ │ │ +│ h │ │ │ │ +└───────┴───────────┴────────────────────┴────────┘ `, ); }); @@ -1797,7 +1874,7 @@ Deno.test(function inspectGetters() { return 0; }, }, { getters: true })), - "{ foo: 0 }", + "{ foo: [Getter: 
0] }", ); assertEquals( @@ -1806,13 +1883,13 @@ Deno.test(function inspectGetters() { throw new Error("bar"); }, }, { getters: true }), - "{ foo: [Thrown Error: bar] }", + "{ foo: [Getter: ] }", ); }); Deno.test(function inspectPrototype() { class A {} - assertEquals(Deno.inspect(A.prototype), "A {}"); + assertEquals(Deno.inspect(A.prototype), "{}"); }); Deno.test(function inspectSorted() { @@ -1822,7 +1899,7 @@ Deno.test(function inspectSorted() { ); assertEquals( stripColor(Deno.inspect(new Set(["b", "a"]), { sorted: true })), - `Set { "a", "b" }`, + `Set(2) { "a", "b" }`, ); assertEquals( stripColor(Deno.inspect( @@ -1832,7 +1909,7 @@ Deno.test(function inspectSorted() { ]), { sorted: true }, )), - `Map { "a" => 1, "b" => 2 }`, + `Map(2) { "a" => 1, "b" => 2 }`, ); }); @@ -1871,7 +1948,7 @@ Deno.test(function inspectTrailingComma() { ]), { trailingComma: true }, )), - `Set { + `Set(2) { "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", }`, @@ -1884,7 +1961,7 @@ Deno.test(function inspectTrailingComma() { ]), { trailingComma: true }, )), - `Map { + `Map(2) { "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" => 1, "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" => 2, }`, @@ -1904,11 +1981,11 @@ Deno.test(function inspectCompact() { Deno.test(function inspectIterableLimit() { assertEquals( stripColor(Deno.inspect(["a", "b", "c"], { iterableLimit: 2 })), - `[ "a", "b", ... 1 more items ]`, + `[ "a", "b", ... 1 more item ]`, ); assertEquals( stripColor(Deno.inspect(new Set(["a", "b", "c"]), { iterableLimit: 2 })), - `Set { "a", "b", ... 1 more items }`, + `Set(3) { "a", "b", ... 1 more item }`, ); assertEquals( stripColor(Deno.inspect( @@ -1919,7 +1996,7 @@ Deno.test(function inspectIterableLimit() { ]), { iterableLimit: 2 }, )), - `Map { "a" => 1, "b" => 2, ... 1 more items }`, + `Map(3) { "a" => 1, "b" => 2, ... 
1 more item }`, ); }); @@ -1958,7 +2035,7 @@ Deno.test(function inspectProxy() { }, }), )), - `MyProxy { prop1: 5, prop2: 5 }`, + `Object [MyProxy] { prop1: 5, prop2: 5 }`, ); assertEquals( stripColor(Deno.inspect( @@ -1983,10 +2060,13 @@ Deno.test(function inspectProxy() { new Proxy([1, 2, 3, 4, 5, 6, 7], { get() {} }), { showProxy: true }, )), - `Proxy [ [ + `Proxy [ + [ 1, 2, 3, 4, 5, 6, 7 - ], { get: [Function: get] } ]`, + ], + { get: [Function: get] } +]`, ); assertEquals( stripColor(Deno.inspect( @@ -2057,7 +2137,7 @@ Deno.test(function inspectEmptyArray() { compact: false, trailingComma: true, }), - "[\n]", + "[]", ); }); @@ -2072,8 +2152,7 @@ Deno.test(function inspectDeepEmptyArray() { trailingComma: true, }), `{ - arr: [ - ], + arr: [], }`, ); }); @@ -2086,11 +2165,11 @@ Deno.test(function inspectEmptyMap() { compact: false, trailingComma: true, }), - "Map {\n}", + "Map(0) {}", ); }); -Deno.test(function inspectEmptyMap() { +Deno.test(function inspectEmptySet() { const set = new Set(); assertEquals( @@ -2098,11 +2177,11 @@ Deno.test(function inspectEmptyMap() { compact: false, trailingComma: true, }), - "Set {\n}", + "Set(0) {}", ); }); -Deno.test(function inspectEmptyMap() { +Deno.test(function inspectEmptyUint8Array() { const typedArray = new Uint8Array(0); assertEquals( @@ -2110,7 +2189,7 @@ Deno.test(function inspectEmptyMap() { compact: false, trailingComma: true, }), - "Uint8Array(0) [\n]", + "Uint8Array(0) []", ); }); @@ -2124,12 +2203,12 @@ Deno.test(function inspectStringAbbreviation() { assertEquals( Deno.inspect(obj, { strAbbreviateSize: 10 }), - '{ str: "This is a ..." }', + '{ str: "This is a "... 59 more characters }', ); assertEquals( Deno.inspect(arr, { strAbbreviateSize: 10 }), - '[ "This is a ..." ]', + '[ "This is a "... 
59 more characters ]', ); }); diff --git a/core/ops_builtin.rs b/core/ops_builtin.rs index ea85b4f00c..0c071a9186 100644 --- a/core/ops_builtin.rs +++ b/core/ops_builtin.rs @@ -58,6 +58,8 @@ crate::extension!( ops_builtin_v8::op_set_promise_hooks, ops_builtin_v8::op_get_promise_details, ops_builtin_v8::op_get_proxy_details, + ops_builtin_v8::op_get_non_index_property_names, + ops_builtin_v8::op_get_constructor_name, ops_builtin_v8::op_memory_usage, ops_builtin_v8::op_set_wasm_streaming_callback, ops_builtin_v8::op_abort_wasm_streaming, diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index bc4f906e27..a77e7a7e6a 100644 --- a/core/ops_builtin_v8.rs +++ b/core/ops_builtin_v8.rs @@ -614,6 +614,66 @@ fn op_get_proxy_details<'a>( Some((target.into(), handler.into())) } +#[op(v8)] +fn op_get_non_index_property_names<'a>( + scope: &mut v8::HandleScope<'a>, + obj: serde_v8::Value<'a>, + filter: u32, +) -> Option> { + let obj = match v8::Local::::try_from(obj.v8_value) { + Ok(proxy) => proxy, + Err(_) => return None, + }; + + let mut property_filter = v8::ALL_PROPERTIES; + if filter & 1 == 1 { + property_filter = property_filter | v8::ONLY_WRITABLE + } + if filter & 2 == 2 { + property_filter = property_filter | v8::ONLY_ENUMERABLE + } + if filter & 4 == 4 { + property_filter = property_filter | v8::ONLY_CONFIGURABLE + } + if filter & 8 == 8 { + property_filter = property_filter | v8::SKIP_STRINGS + } + if filter & 16 == 16 { + property_filter = property_filter | v8::SKIP_SYMBOLS + } + + let maybe_names = obj.get_property_names( + scope, + v8::GetPropertyNamesArgs { + mode: v8::KeyCollectionMode::OwnOnly, + property_filter, + index_filter: v8::IndexFilter::SkipIndices, + ..Default::default() + }, + ); + + if let Some(names) = maybe_names { + let names_val: v8::Local = names.into(); + Some(names_val.into()) + } else { + None + } +} + +#[op(v8)] +fn op_get_constructor_name<'a>( + scope: &mut v8::HandleScope<'a>, + obj: serde_v8::Value<'a>, +) -> Option { + let obj 
= match v8::Local::::try_from(obj.v8_value) { + Ok(proxy) => proxy, + Err(_) => return None, + }; + + let name = obj.get_constructor_name().to_rust_string_lossy(scope); + Some(name) +} + // HeapStats stores values from a isolate.get_heap_statistics() call #[derive(Serialize)] #[serde(rename_all = "camelCase")] diff --git a/ext/console/02_console.js b/ext/console/02_console.js index 5873a2ec2e..51e8278764 100644 --- a/ext/console/02_console.js +++ b/ext/console/02_console.js @@ -6,123 +6,2250 @@ const core = globalThis.Deno.core; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { - AggregateErrorPrototype, - ArrayPrototypeUnshift, - isNaN, - DatePrototype, DateNow, - DatePrototypeGetTime, - DatePrototypeToISOString, Boolean, - BooleanPrototype, - BooleanPrototypeToString, ObjectKeys, ObjectAssign, ObjectCreate, ObjectFreeze, - ObjectIs, ObjectValues, ObjectFromEntries, - ObjectGetPrototypeOf, - ObjectGetOwnPropertyDescriptor, - ObjectGetOwnPropertySymbols, ObjectPrototypeHasOwnProperty, ObjectPrototypeIsPrototypeOf, - ObjectPrototypePropertyIsEnumerable, - PromisePrototype, + ObjectDefineProperty, String, - StringPrototype, + SafeStringIterator, + DatePrototype, + MapPrototypeEntries, + SetPrototypeGetSize, StringPrototypeRepeat, + StringPrototypeEndsWith, + StringPrototypeIndexOf, + RegExpPrototypeExec, + RegExpPrototypeSymbolReplace, StringPrototypeReplace, StringPrototypeReplaceAll, + ObjectPrototype, + FunctionPrototypeCall, StringPrototypeSplit, StringPrototypeSlice, - StringPrototypeCodePointAt, StringPrototypeCharCodeAt, - StringPrototypeNormalize, + MathFloor, + StringPrototypePadEnd, + ObjectGetOwnPropertySymbols, + ObjectGetOwnPropertyNames, + SymbolPrototypeGetDescription, + SymbolPrototypeToString, + ArrayPrototypePushApply, + ObjectPrototypePropertyIsEnumerable, StringPrototypeMatch, StringPrototypePadStart, - StringPrototypeLocaleCompare, - StringPrototypeToString, StringPrototypeTrim, 
StringPrototypeIncludes, - StringPrototypeStartsWith, - TypeError, NumberIsInteger, NumberParseInt, - RegExpPrototype, - RegExpPrototypeTest, - RegExpPrototypeToString, SafeArrayIterator, SafeMap, - SafeStringIterator, - SafeSetIterator, + ArrayPrototypeShift, + AggregateErrorPrototype, + RegExpPrototypeTest, + ObjectPrototypeToString, + ArrayPrototypeSort, + ArrayPrototypeUnshift, + DatePrototypeGetTime, + DatePrototypeToISOString, SafeRegExp, SetPrototype, - SetPrototypeEntries, - SetPrototypeGetSize, Symbol, - SymbolPrototype, - SymbolPrototypeToString, - SymbolPrototypeValueOf, - SymbolPrototypeGetDescription, SymbolToStringTag, SymbolHasInstance, SymbolFor, + ObjectGetOwnPropertyDescriptor, + ObjectIs, + Uint8Array, + isNaN, + TypedArrayPrototypeGetSymbolToStringTag, + TypedArrayPrototypeGetLength, + ReflectOwnKeys, Array, + RegExpPrototypeToString, ArrayIsArray, + SymbolIterator, + ArrayBufferIsView, ArrayPrototypeJoin, ArrayPrototypeMap, ArrayPrototypeReduce, - ArrayPrototypeEntries, + ObjectSetPrototypeOf, ArrayPrototypePush, - ArrayPrototypePop, - ArrayPrototypeSort, - ArrayPrototypeSlice, - ArrayPrototypeShift, ArrayPrototypeIncludes, ArrayPrototypeFill, ArrayPrototypeFilter, ArrayPrototypeFind, FunctionPrototypeBind, - FunctionPrototypeToString, MapPrototype, MapPrototypeHas, MapPrototypeGet, MapPrototypeSet, MapPrototypeDelete, - MapPrototypeEntries, MapPrototypeForEach, MapPrototypeGetSize, Error, ErrorPrototype, ErrorCaptureStackTrace, + MathSqrt, MathAbs, MathMax, MathMin, - MathSqrt, MathRound, - MathFloor, Number, - NumberPrototype, NumberPrototypeToString, - NumberPrototypeValueOf, - BigIntPrototype, - BigIntPrototypeToString, Proxy, ReflectGet, ReflectGetOwnPropertyDescriptor, ReflectGetPrototypeOf, ReflectHas, - TypedArrayPrototypeGetLength, - TypedArrayPrototypeGetSymbolToStringTag, - WeakMapPrototype, - WeakSetPrototype, + BigIntPrototypeValueOf, + ObjectGetPrototypeOf, + FunctionPrototypeToString, + StringPrototypeStartsWith, + 
SetPrototypeValues, + SafeSet, + SafeSetIterator, + TypedArrayPrototypeGetByteLength, + SafeMapIterator, + ArrayBufferPrototype, } = primordials; -import * as colors from "ext:deno_console/01_colors.js"; +import * as colors_ from "ext:deno_console/01_colors.js"; -function isInvalidDate(x) { - return isNaN(DatePrototypeGetTime(x)); +// Don't use 'blue' not visible on cmd.exe +const styles = { + special: "cyan", + number: "yellow", + bigint: "yellow", + boolean: "yellow", + undefined: "grey", + null: "bold", + string: "green", + symbol: "green", + date: "magenta", + // "name": intentionally not styling + // TODO(BridgeAR): Highlight regular expressions properly. + regexp: "red", + module: "underline", +}; + +const defaultFG = 39; +const defaultBG = 49; + +// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics +// Each color consists of an array with the color code as first entry and the +// reset code as second entry. +const colors = { + reset: [0, 0], + bold: [1, 22], + dim: [2, 22], // Alias: faint + italic: [3, 23], + underline: [4, 24], + blink: [5, 25], + // Swap foreground and background colors + inverse: [7, 27], // Alias: swapcolors, swapColors + hidden: [8, 28], // Alias: conceal + strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut + doubleunderline: [21, 24], // Alias: doubleUnderline + black: [30, defaultFG], + red: [31, defaultFG], + green: [32, defaultFG], + yellow: [33, defaultFG], + blue: [34, defaultFG], + magenta: [35, defaultFG], + cyan: [36, defaultFG], + white: [37, defaultFG], + bgBlack: [40, defaultBG], + bgRed: [41, defaultBG], + bgGreen: [42, defaultBG], + bgYellow: [43, defaultBG], + bgBlue: [44, defaultBG], + bgMagenta: [45, defaultBG], + bgCyan: [46, defaultBG], + bgWhite: [47, defaultBG], + framed: [51, 54], + overlined: [53, 55], + gray: [90, defaultFG], // Alias: grey, blackBright + redBright: [91, defaultFG], + greenBright: [92, defaultFG], + yellowBright: [93, defaultFG], + blueBright: 
[94, defaultFG], + magentaBright: [95, defaultFG], + cyanBright: [96, defaultFG], + whiteBright: [97, defaultFG], + bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright + bgRedBright: [101, defaultBG], + bgGreenBright: [102, defaultBG], + bgYellowBright: [103, defaultBG], + bgBlueBright: [104, defaultBG], + bgMagentaBright: [105, defaultBG], + bgCyanBright: [106, defaultBG], + bgWhiteBright: [107, defaultBG], +}; + +function defineColorAlias(target, alias) { + ObjectDefineProperty(colors, alias, { + get() { + return this[target]; + }, + set(value) { + this[target] = value; + }, + configurable: true, + enumerable: false, + }); +} + +defineColorAlias("gray", "grey"); +defineColorAlias("gray", "blackBright"); +defineColorAlias("bgGray", "bgGrey"); +defineColorAlias("bgGray", "bgBlackBright"); +defineColorAlias("dim", "faint"); +defineColorAlias("strikethrough", "crossedout"); +defineColorAlias("strikethrough", "strikeThrough"); +defineColorAlias("strikethrough", "crossedOut"); +defineColorAlias("hidden", "conceal"); +defineColorAlias("inverse", "swapColors"); +defineColorAlias("inverse", "swapcolors"); +defineColorAlias("doubleunderline", "doubleUnderline"); + +// https://tc39.es/ecma262/#sec-boolean.prototype.valueof +const _booleanValueOf = Boolean.prototype.valueOf; + +// https://tc39.es/ecma262/#sec-number.prototype.valueof +const _numberValueOf = Number.prototype.valueOf; + +// https://tc39.es/ecma262/#sec-string.prototype.valueof +const _stringValueOf = String.prototype.valueOf; + +// https://tc39.es/ecma262/#sec-symbol.prototype.valueof +const _symbolValueOf = Symbol.prototype.valueOf; + +// https://tc39.es/ecma262/#sec-weakmap.prototype.has +const _weakMapHas = WeakMap.prototype.has; + +// https://tc39.es/ecma262/#sec-weakset.prototype.has +const _weakSetHas = WeakSet.prototype.has; + +// https://tc39.es/ecma262/#sec-get-arraybuffer.prototype.bytelength +const _getArrayBufferByteLength = ObjectGetOwnPropertyDescriptor( + ArrayBufferPrototype, + 
"byteLength", +).get; + +// https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.bytelength +let _getSharedArrayBufferByteLength; + +// https://tc39.es/ecma262/#sec-get-set.prototype.size +const _getSetSize = ObjectGetOwnPropertyDescriptor( + SetPrototype, + "size", +).get; + +// https://tc39.es/ecma262/#sec-get-map.prototype.size +const _getMapSize = ObjectGetOwnPropertyDescriptor( + MapPrototype, + "size", +).get; + +function isObjectLike(value) { + return value !== null && typeof value === "object"; +} + +export function isAnyArrayBuffer(value) { + return isArrayBuffer(value) || isSharedArrayBuffer(value); +} + +export function isArgumentsObject(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object Arguments]" + ); +} + +export function isArrayBuffer(value) { + try { + _getArrayBufferByteLength.call(value); + return true; + } catch { + return false; + } +} + +export function isAsyncFunction(value) { + return ( + typeof value === "function" && + (value[SymbolToStringTag] === "AsyncFunction") + ); +} + +export function isAsyncGeneratorFunction(value) { + return ( + typeof value === "function" && + (value[SymbolToStringTag] === "AsyncGeneratorFunction") + ); +} + +export function isBooleanObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + _booleanValueOf.call(value); + return true; + } catch { + return false; + } +} + +export function isBoxedPrimitive( + value, +) { + return ( + isBooleanObject(value) || + isStringObject(value) || + isNumberObject(value) || + isSymbolObject(value) || + isBigIntObject(value) + ); +} + +export function isDataView(value) { + return ( + ArrayBufferIsView(value) && + TypedArrayPrototypeGetSymbolToStringTag(value) === undefined + ); +} + +export function isTypedArray(value) { + return TypedArrayPrototypeGetSymbolToStringTag(value) !== undefined; +} + +export function isGeneratorFunction( + value, +) { + return ( + typeof 
value === "function" && + value[SymbolToStringTag] === "GeneratorFunction" + ); +} + +export function isMap(value) { + try { + _getMapSize.call(value); + return true; + } catch { + return false; + } +} + +export function isMapIterator( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Map Iterator" + ); +} + +export function isModuleNamespaceObject( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Module" + ); +} + +export function isNativeError(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object Error]" + ); +} + +export function isNumberObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + _numberValueOf.call(value); + return true; + } catch { + return false; + } +} + +export function isBigIntObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + BigIntPrototypeValueOf(value); + return true; + } catch { + return false; + } +} + +export function isPromise(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Promise" + ); +} +export function isRegExp(value) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === undefined && + ObjectPrototypeToString(value) === "[object RegExp]" + ); +} + +export function isSet(value) { + try { + _getSetSize.call(value); + return true; + } catch { + return false; + } +} + +export function isSetIterator( + value, +) { + return ( + isObjectLike(value) && + value[SymbolToStringTag] === "Set Iterator" + ); +} + +export function isSharedArrayBuffer( + value, +) { + // TODO(kt3k): add SharedArrayBuffer to primordials + _getSharedArrayBufferByteLength ??= ObjectGetOwnPropertyDescriptor( + // deno-lint-ignore prefer-primordials + SharedArrayBuffer.prototype, + "byteLength", + ).get; + + try { + _getSharedArrayBufferByteLength.call(value); + return true; + } catch { + return false; + } +} + +export function 
isStringObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + _stringValueOf.call(value); + return true; + } catch { + return false; + } +} + +export function isSymbolObject(value) { + if (!isObjectLike(value)) { + return false; + } + + try { + _symbolValueOf.call(value); + return true; + } catch { + return false; + } +} + +export function isWeakMap( + value, +) { + try { + _weakMapHas.call(value, null); + return true; + } catch { + return false; + } +} + +export function isWeakSet( + value, +) { + try { + _weakSetHas.call(value, null); + return true; + } catch { + return false; + } +} + +const kObjectType = 0; +const kArrayType = 1; +const kArrayExtrasType = 2; + +const kMinLineLength = 16; + +// Constants to map the iterator state. +const kWeak = 0; +const kIterator = 1; +const kMapEntries = 2; + +// Escaped control characters (plus the single quote and the backslash). Use +// empty strings to fill up unused entries. +// deno-fmt-ignore +const meta = [ + '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 + '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F + '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 + '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F + '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F + '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F + '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 + '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F + '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 
+ '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F +]; + +// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot +const isUndetectableObject = (v) => typeof v === "undefined" && v !== undefined; + +const strEscapeSequencesReplacer = new SafeRegExp( + "[\x00-\x1f\x27\x5c\x7f-\x9f]", + "g", +); + +const keyStrRegExp = new SafeRegExp("^[a-zA-Z_][a-zA-Z_0-9]*$"); +const numberRegExp = new SafeRegExp("^(0|[1-9][0-9]*)$"); + +// TODO(wafuwafu13): Figure out +const escapeFn = (str) => meta[str.charCodeAt(0)]; + +function stylizeNoColor(str) { + return str; +} + +// node custom inspect symbol +const nodeCustomInspectSymbol = SymbolFor("nodejs.util.inspect.custom"); + +// This non-unique symbol is used to support op_crates, ie. +// in extensions/web we don't want to depend on public +// Symbol.for("Deno.customInspect") symbol defined in the public API. +// Internal only, shouldn't be used by users. +const privateCustomInspect = SymbolFor("Deno.privateCustomInspect"); + +function getUserOptions(ctx, isCrossContext) { + const ret = { + stylize: ctx.stylize, + showHidden: ctx.showHidden, + depth: ctx.depth, + colors: ctx.colors, + customInspect: ctx.customInspect, + showProxy: ctx.showProxy, + maxArrayLength: ctx.maxArrayLength, + maxStringLength: ctx.maxStringLength, + breakLength: ctx.breakLength, + compact: ctx.compact, + sorted: ctx.sorted, + getters: ctx.getters, + numericSeparator: ctx.numericSeparator, + ...ctx.userOptions, + }; + + // Typically, the target value will be an instance of `Object`. If that is + // *not* the case, the object may come from another vm.Context, and we want + // to avoid passing it objects from this Context in that case, so we remove + // the prototype from the returned object itself + the `stylize()` function, + // and remove all other non-primitives, including non-primitive user options. 
+ if (isCrossContext) { + ObjectSetPrototypeOf(ret, null); + for (const key of new SafeArrayIterator(ObjectKeys(ret))) { + if ( + (typeof ret[key] === "object" || typeof ret[key] === "function") && + ret[key] !== null + ) { + delete ret[key]; + } + } + ret.stylize = ObjectSetPrototypeOf((value, flavour) => { + let stylized; + try { + stylized = `${ctx.stylize(value, flavour)}`; + } catch { + // Continue regardless of error. + } + + if (typeof stylized !== "string") return value; + // `stylized` is a string as it should be, which is safe to pass along. + return stylized; + }, null); + } + + return ret; +} + +// Note: using `formatValue` directly requires the indentation level to be +// corrected by setting `ctx.indentationLvL += diff` and then to decrease the +// value afterwards again. +function formatValue( + ctx, + value, + recurseTimes, + typedArray, +) { + // Primitive types cannot have properties. + if ( + typeof value !== "object" && + typeof value !== "function" && + !isUndetectableObject(value) + ) { + return formatPrimitive(ctx.stylize, value, ctx); + } + if (value === null) { + return ctx.stylize("null", "null"); + } + + // Memorize the context for custom inspection on proxies. + const context = value; + // Always check for proxies to prevent side effects and to prevent triggering + // any proxy handlers. + // TODO(wafuwafu13): Set Proxy + const proxyDetails = core.getProxyDetails(value); + // const proxy = getProxyDetails(value, !!ctx.showProxy); + // if (proxy !== undefined) { + // if (ctx.showProxy) { + // return formatProxy(ctx, proxy, recurseTimes); + // } + // value = proxy; + // } + + // Provide a hook for user-specified inspect functions. + // Check that value is an object with an inspect function on it. 
+ if (ctx.customInspect) { + if ( + ReflectHas(value, customInspect) && + typeof value[customInspect] === "function" + ) { + return String(value[customInspect](inspect, ctx)); + } else if ( + ReflectHas(value, privateCustomInspect) && + typeof value[privateCustomInspect] === "function" + ) { + // TODO(nayeemrmn): `inspect` is passed as an argument because custom + // inspect implementations in `extensions` need it, but may not have access + // to the `Deno` namespace in web workers. Remove when the `Deno` + // namespace is always enabled. + return String(value[privateCustomInspect](inspect, ctx)); + } else if (ReflectHas(value, nodeCustomInspectSymbol)) { + const maybeCustom = value[nodeCustomInspectSymbol]; + if ( + typeof maybeCustom === "function" && + // Filter out the util module, its inspect function is special. + maybeCustom !== ctx.inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value) + ) { + // This makes sure the recurseTimes are reported as before while using + // a counter internally. + const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; + // TODO(@crowlKats): proxy handling + const isCrossContext = !ObjectPrototypeIsPrototypeOf( + ObjectPrototype, + context, + ); + const ret = FunctionPrototypeCall( + maybeCustom, + context, + depth, + getUserOptions(ctx, isCrossContext), + ctx.inspect, + ); + // If the custom inspection method returned `this`, don't go into + // infinite recursion. + if (ret !== context) { + if (typeof ret !== "string") { + return formatValue(ctx, ret, recurseTimes); + } + return StringPrototypeReplaceAll( + ret, + "\n", + `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`, + ); + } + } + } + } + + // Using an array here is actually better for the average case than using + // a Set. `seen` will only check for the depth and will never grow too large. 
  // Circular reference: hand out (or mint) a stable per-inspect id for this
  // value and render `[Circular *<id>]` instead of recursing forever.
  if (ctx.seen.includes(value)) {
    let index = 1;
    if (ctx.circular === undefined) {
      ctx.circular = new SafeMap();
      ctx.circular.set(value, index);
    } else {
      index = ctx.circular.get(value);
      if (index === undefined) {
        // New circular value: next id is the map size + 1.
        index = ctx.circular.size + 1;
        ctx.circular.set(value, index);
      }
    }
    return ctx.stylize(`[Circular *${index}]`, "special");
  }

  return formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails);
}

// Builds the `[class Name [Ctor] [Tag] extends Super]` header shown when
// inspecting a class constructor.
function getClassBase(value, constructor, tag) {
  const hasName = ObjectPrototypeHasOwnProperty(value, "name");
  const name = (hasName && value.name) || "(anonymous)";
  let base = `class ${name}`;
  if (constructor !== "Function" && constructor !== null) {
    base += ` [${constructor}]`;
  }
  if (tag !== "" && constructor !== tag) {
    base += ` [${tag}]`;
  }
  if (constructor !== null) {
    const superName = ObjectGetPrototypeOf(value).name;
    if (superName) {
      base += ` extends ${superName}`;
    }
  } else {
    base += " extends [null prototype]";
  }
  return `[${base}]`;
}

// Strips `//` and `/* ... */` comments from stringified source so that
// `classRegExp` below can reliably detect a class body.
const stripCommentsRegExp = new SafeRegExp(
  "(\\/\\/.*?\\n)|(\\/\\*(.|\\n)*?\\*\\/)",
  "g",
);
const classRegExp = new SafeRegExp("^(\\s+[^(]*?)\\s*{");

// Builds the `[Function: name]` / `[AsyncGeneratorFunction: ...]` style
// header for a function value; defers to getClassBase() when the stringified
// source looks like a class definition.
function getFunctionBase(value, constructor, tag) {
  const stringified = FunctionPrototypeToString(value);
  if (
    StringPrototypeStartsWith(stringified, "class") &&
    StringPrototypeEndsWith(stringified, "}")
  ) {
    const slice = StringPrototypeSlice(stringified, 5, -1);
    const bracketIndex = StringPrototypeIndexOf(slice, "{");
    if (
      bracketIndex !== -1 &&
      // Fast path: no `(` before the first `{` means no parameter list,
      // so this cannot be a plain function — treat it as a class.
      (!StringPrototypeIncludes(
        StringPrototypeSlice(slice, 0, bracketIndex),
        "(",
      ) ||
        // Slow path to guarantee that it's indeed a class.
+ RegExpPrototypeExec( + classRegExp, + RegExpPrototypeSymbolReplace(stripCommentsRegExp, slice), + ) !== null) + ) { + return getClassBase(value, constructor, tag); + } + } + let type = "Function"; + if (isGeneratorFunction(value)) { + type = `Generator${type}`; + } + if (isAsyncFunction(value)) { + type = `Async${type}`; + } + if (isAsyncGeneratorFunction(value)) { + type = `AsyncGenerator${type}`; + } + let base = `[${type}`; + if (constructor === null) { + base += " (null prototype)"; + } + if (value.name === "") { + base += " (anonymous)"; + } else { + base += `: ${value.name}`; + } + base += "]"; + if (constructor !== type && constructor !== null) { + base += ` ${constructor}`; + } + if (tag !== "" && constructor !== tag) { + base += ` [${tag}]`; + } + return base; +} + +function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { + let keys; + let protoProps; + if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { + protoProps = []; + } + + const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); + // Reset the variable to check for this later on. + if (protoProps !== undefined && protoProps.length === 0) { + protoProps = undefined; + } + + let tag = value[SymbolToStringTag]; + // Only list the tag in case it's non-enumerable / not an own property. + // Otherwise we'd print this twice. + if ( + typeof tag !== "string" + // TODO(wafuwafu13): Implement + // (tag !== "" && + // (ctx.showHidden + // ? Object.prototype.hasOwnProperty + // : Object.prototype.propertyIsEnumerable)( + // value, + // Symbol.toStringTag, + // )) + ) { + tag = ""; + } + let base = ""; + let formatter = () => []; + let braces; + let noIterator = true; + let i = 0; + const filter = ctx.showHidden ? 0 : 2; + + let extrasType = kObjectType; + + if (proxyDetails != null && ctx.showProxy) { + return `Proxy ` + formatValue(ctx, proxyDetails, recurseTimes); + } else { + // Iterators and the rest are split to reduce checks. 
+ // We have to check all values in case the constructor is set to null. + // Otherwise it would not possible to identify all types properly. + if (ReflectHas(value, SymbolIterator) || constructor === null) { + noIterator = false; + if (ArrayIsArray(value)) { + // Only set the constructor for non ordinary ("Array [...]") arrays. + const prefix = (constructor !== "Array" || tag !== "") + ? getPrefix(constructor, tag, "Array", `(${value.length})`) + : ""; + keys = core.ops.op_get_non_index_property_names(value, filter); + braces = [`${prefix}[`, "]"]; + if ( + value.length === 0 && keys.length === 0 && protoProps === undefined + ) { + return `${braces[0]}]`; + } + extrasType = kArrayExtrasType; + formatter = formatArray; + } else if (isSet(value)) { + const size = SetPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, "Set", `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null + ? FunctionPrototypeBind(formatSet, null, value) + : FunctionPrototypeBind(formatSet, null, SetPrototypeValues(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) { + return `${prefix}{}`; + } + braces = [`${prefix}{`, "}"]; + } else if (isMap(value)) { + const size = MapPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, "Map", `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null + ? FunctionPrototypeBind(formatMap, null, value) + : FunctionPrototypeBind(formatMap, null, MapPrototypeEntries(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) { + return `${prefix}{}`; + } + braces = [`${prefix}{`, "}"]; + } else if (isTypedArray(value)) { + keys = core.ops.op_get_non_index_property_names(value, filter); + const bound = value; + const fallback = ""; + if (constructor === null) { + // TODO(wafuwafu13): Implement + // fallback = TypedArrayPrototypeGetSymbolToStringTag(value); + // // Reconstruct the array information. 
+ // bound = new primordials[fallback](value); + } + const size = TypedArrayPrototypeGetLength(value); + const prefix = getPrefix(constructor, tag, fallback, `(${size})`); + braces = [`${prefix}[`, "]"]; + if (value.length === 0 && keys.length === 0 && !ctx.showHidden) { + return `${braces[0]}]`; + } + // Special handle the value. The original value is required below. The + // bound function is required to reconstruct missing information. + formatter = FunctionPrototypeBind(formatTypedArray, null, bound, size); + extrasType = kArrayExtrasType; + } else if (isMapIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces("Map", tag); + // Add braces to the formatter parameters. + formatter = FunctionPrototypeBind(formatIterator, null, braces); + } else if (isSetIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces("Set", tag); + // Add braces to the formatter parameters. + formatter = FunctionPrototypeBind(formatIterator, null, braces); + } else { + noIterator = true; + } + } + if (noIterator) { + keys = getKeys(value, ctx.showHidden); + braces = ["{", "}"]; + if (constructor === "Object") { + if (isArgumentsObject(value)) { + braces[0] = "[Arguments] {"; + } else if (tag !== "") { + braces[0] = `${getPrefix(constructor, tag, "Object")}{`; + } + if (keys.length === 0 && protoProps === undefined) { + return `${braces[0]}}`; + } + } else if (typeof value === "function") { + base = getFunctionBase(value, constructor, tag); + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, "special"); + } + } else if (isRegExp(value)) { + // Make RegExps say that they are RegExps + base = RegExpPrototypeToString( + constructor !== null ? 
value : new SafeRegExp(value), + ); + const prefix = getPrefix(constructor, tag, "RegExp"); + if (prefix !== "RegExp ") { + base = `${prefix}${base}`; + } + if ( + (keys.length === 0 && protoProps === undefined) || + (recurseTimes > ctx.depth && ctx.depth !== null) + ) { + return ctx.stylize(base, "regexp"); + } + } else if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { + const date = proxyDetails ? proxyDetails[0] : value; + if (isNaN(DatePrototypeGetTime(date))) { + return ctx.stylize("Invalid Date", "date"); + } else { + base = DatePrototypeToISOString(date); + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, "date"); + } + } + } else if (ObjectPrototypeIsPrototypeOf(ErrorPrototype, value)) { + base = inspectError(value, ctx); + if (keys.length === 0 && protoProps === undefined) { + return base; + } + } else if (isAnyArrayBuffer(value)) { + // Fast path for ArrayBuffer and SharedArrayBuffer. + // Can't do the same for DataView because it has a non-primitive + // .buffer property that we need to recurse for. + const arrayType = isArrayBuffer(value) + ? "ArrayBuffer" + : "SharedArrayBuffer"; + + const prefix = getPrefix(constructor, tag, arrayType); + if (typedArray === undefined) { + formatter = formatArrayBuffer; + } else if (keys.length === 0 && protoProps === undefined) { + return prefix + + `{ byteLength: ${ + formatNumber(ctx.stylize, TypedArrayPrototypeGetByteLength(value)) + } }`; + } + braces[0] = `${prefix}{`; + ArrayPrototypeUnshift(keys, "byteLength"); + } else if (isDataView(value)) { + braces[0] = `${getPrefix(constructor, tag, "DataView")}{`; + // .buffer goes last, it's not a primitive like the others. 
+ ArrayPrototypeUnshift(keys, "byteLength", "byteOffset", "buffer"); + } else if (isPromise(value)) { + braces[0] = `${getPrefix(constructor, tag, "Promise")}{`; + formatter = formatPromise; + } else if (isWeakSet(value)) { + braces[0] = `${getPrefix(constructor, tag, "WeakSet")}{`; + formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection; + } else if (isWeakMap(value)) { + braces[0] = `${getPrefix(constructor, tag, "WeakMap")}{`; + formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection; + } else if (isModuleNamespaceObject(value)) { + braces[0] = `${getPrefix(constructor, tag, "Module")}{`; + // Special handle keys for namespace objects. + formatter = formatNamespaceObject.bind(null, keys); + } else if (isBoxedPrimitive(value)) { + base = getBoxedBase(value, ctx, keys, constructor, tag); + if (keys.length === 0 && protoProps === undefined) { + return base; + } + } else { + if (keys.length === 0 && protoProps === undefined) { + // TODO(wafuwafu13): Implement + // if (isExternal(value)) { + // const address = getExternalValue(value).toString(16); + // return ctx.stylize(`[External: ${address}]`, 'special'); + // } + return `${getCtxStyle(value, constructor, tag)}{}`; + } + braces[0] = `${getCtxStyle(value, constructor, tag)}{`; + } + } + } + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + let constructorName = StringPrototypeSlice( + getCtxStyle(value, constructor, tag), + 0, + -1, + ); + if (constructor !== null) { + constructorName = `[${constructorName}]`; + } + return ctx.stylize(constructorName, "special"); + } + recurseTimes += 1; + + ctx.seen.push(value); + ctx.currentDepth = recurseTimes; + let output; + const indentationLvl = ctx.indentationLvl; + try { + output = formatter(ctx, value, recurseTimes); + for (i = 0; i < keys.length; i++) { + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, keys[i], extrasType), + ); + } + if (protoProps !== undefined) { + ArrayPrototypePushApply(output, protoProps); + } 
+ } catch (err) { + const constructorName = StringPrototypeSlice( + getCtxStyle(value, constructor, tag), + 0, + -1, + ); + return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); + } + + if (ctx.circular !== undefined) { + const index = ctx.circular.get(value); + if (index !== undefined) { + const reference = ctx.stylize(``, "special"); + // Add reference always to the very beginning of the output. + if (ctx.compact !== true) { + base = base === "" ? reference : `${reference} ${base}`; + } else { + braces[0] = `${reference} ${braces[0]}`; + } + } + } + ctx.seen.pop(); + + if (ctx.sorted) { + const comparator = ctx.sorted === true ? undefined : ctx.sorted; + if (extrasType === kObjectType) { + output = ArrayPrototypeSort(output, comparator); + } else if (keys.length > 1) { + const sorted = output.slice(output.length - keys.length).sort(comparator); + output.splice( + output.length - keys.length, + keys.length, + ...new SafeArrayIterator(sorted), + ); + } + } + + const res = reduceToSingleString( + ctx, + output, + base, + braces, + extrasType, + recurseTimes, + value, + ); + const budget = ctx.budget[ctx.indentationLvl] || 0; + const newLength = budget + res.length; + ctx.budget[ctx.indentationLvl] = newLength; + // If any indentationLvl exceeds this limit, limit further inspecting to the + // minimum. Otherwise the recursive algorithm might continue inspecting the + // object even though the maximum string size (~2 ** 28 on 32 bit systems and + // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at + // exactly 2 ** 27 but a bit higher. This depends on the object shape. + // This limit also makes sure that huge objects don't block the event loop + // significantly. 
+ if (newLength > 2 ** 27) { + ctx.depth = -1; + } + return res; +} + +const builtInObjectsRegExp = new SafeRegExp("^[A-Z][a-zA-Z0-9]+$"); +const builtInObjects = new SafeSet( + ObjectGetOwnPropertyNames(globalThis).filter((e) => + builtInObjectsRegExp.test(e) + ), +); + +function addPrototypeProperties( + ctx, + main, + obj, + recurseTimes, + output, +) { + let depth = 0; + let keys; + let keySet; + do { + if (depth !== 0 || main === obj) { + obj = ObjectGetPrototypeOf(obj); + // Stop as soon as a null prototype is encountered. + if (obj === null) { + return; + } + // Stop as soon as a built-in object type is detected. + const descriptor = ObjectGetOwnPropertyDescriptor(obj, "constructor"); + if ( + descriptor !== undefined && + typeof descriptor.value === "function" && + builtInObjects.has(descriptor.value.name) + ) { + return; + } + } + + if (depth === 0) { + keySet = new SafeSet(); + } else { + Array.prototype.forEach.call(keys, (key) => keySet.add(key)); + } + // Get all own property names and symbols. + keys = ReflectOwnKeys(obj); + Array.prototype.push.call(ctx.seen, main); + for (const key of new SafeArrayIterator(keys)) { + // Ignore the `constructor` property and keys that exist on layers above. + if ( + key === "constructor" || + // deno-lint-ignore no-prototype-builtins + main.hasOwnProperty(key) || + (depth !== 0 && keySet.has(key)) + ) { + continue; + } + const desc = ObjectGetOwnPropertyDescriptor(obj, key); + if (typeof desc.value === "function") { + continue; + } + const value = formatProperty( + ctx, + obj, + recurseTimes, + key, + kObjectType, + desc, + main, + ); + if (ctx.colors) { + // Faint! + Array.prototype.push.call(output, `\u001b[2m${value}\u001b[22m`); + } else { + Array.prototype.push.call(output, value); + } + } + Array.prototype.pop.call(ctx.seen); + // Limit the inspection to up to three prototype layers. 
Using `recurseTimes` + // is not a good choice here, because it's as if the properties are declared + // on the current object from the users perspective. + } while (++depth !== 3); +} + +function isInstanceof(proto, object) { + try { + return ObjectPrototypeIsPrototypeOf(proto, object); + } catch { + return false; + } +} + +function getConstructorName(obj, ctx, recurseTimes, protoProps) { + let firstProto; + const tmp = obj; + while (obj || isUndetectableObject(obj)) { + const descriptor = ObjectGetOwnPropertyDescriptor(obj, "constructor"); + if ( + descriptor !== undefined && + typeof descriptor.value === "function" && + descriptor.value.name !== "" && + isInstanceof(descriptor.value.prototype, tmp) + ) { + if ( + protoProps !== undefined && + (firstProto !== obj || + !builtInObjects.has(descriptor.value.name)) + ) { + addPrototypeProperties( + ctx, + tmp, + firstProto || tmp, + recurseTimes, + protoProps, + ); + } + return String(descriptor.value.name); + } + + obj = ObjectGetPrototypeOf(obj); + if (firstProto === undefined) { + firstProto = obj; + } + } + + if (firstProto === null) { + return null; + } + + const res = core.ops.op_get_constructor_name(tmp); + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + return `${res} `; + } + + const protoConstr = getConstructorName( + firstProto, + ctx, + recurseTimes + 1, + protoProps, + ); + + if (protoConstr === null) { + return `${res} <${ + inspect(firstProto, { + ...ctx, + customInspect: false, + depth: -1, + }) + }>`; + } + + return `${res} <${protoConstr}>`; +} + +const formatPrimitiveRegExp = new SafeRegExp("(?<=\n)"); +function formatPrimitive(fn, value, ctx) { + if (typeof value === "string") { + let trailer = ""; + if (value.length > ctx.maxStringLength) { + const remaining = value.length - ctx.maxStringLength; + value = value.slice(0, ctx.maxStringLength); + trailer = `... ${remaining} more character${remaining > 1 ? 
        "s" : ""}`;
    }
    if (
      ctx.compact !== true &&
      // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth
      // function.
      value.length > kMinLineLength &&
      value.length > ctx.breakLength - ctx.indentationLvl - 4
    ) {
      // Long multi-line string in non-compact mode: quote each line
      // separately and join with ` +` so the output reads like concatenated
      // string literals, indented under the current level.
      return value
        .split(formatPrimitiveRegExp)
        .map((line) => fn(quoteString(line, ctx), "string"))
        .join(` +\n${" ".repeat(ctx.indentationLvl + 2)}`) + trailer;
    }
    return fn(quoteString(value, ctx), "string") + trailer;
  }
  if (typeof value === "number") {
    return formatNumber(fn, value);
  }
  if (typeof value === "bigint") {
    return formatBigInt(fn, value);
  }
  if (typeof value === "boolean") {
    return fn(`${value}`, "boolean");
  }
  if (typeof value === "undefined") {
    return fn("undefined", "undefined");
  }
  // es6 symbol primitive
  return fn(maybeQuoteSymbol(value, ctx), "symbol");
}

// Builds the constructor/tag prefix shown before a value's braces,
// e.g. `Map(2) `, `Foo [Bar] `, or `[Array(3): null prototype] `.
// `size` is an optional `(n)` suffix; `fallback` is used when the
// constructor is null.
function getPrefix(constructor, tag, fallback, size = "") {
  if (constructor === null) {
    if (tag !== "" && fallback !== tag) {
      return `[${fallback}${size}: null prototype] [${tag}] `;
    }
    return `[${fallback}${size}: null prototype] `;
  }

  if (tag !== "" && constructor !== tag) {
    return `${constructor}${size} [${tag}] `;
  }
  return `${constructor}${size} `;
}

// Formats an ordinary array's elements, up to ctx.maxArrayLength entries,
// appending a `... n more items` marker for the rest. Sparse arrays are
// delegated to formatSpecialArray() as soon as a hole is found.
function formatArray(ctx, value, recurseTimes) {
  const valLen = value.length;
  const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen);

  const remaining = valLen - len;
  const output = [];
  for (let i = 0; i < len; i++) {
    // Special handle sparse arrays.
    // deno-lint-ignore no-prototype-builtins
    if (!value.hasOwnProperty(i)) {
      return formatSpecialArray(ctx, value, recurseTimes, len, output, i);
    }
    output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType));
  }
  if (remaining > 0) {
    output.push(`... ${remaining} more item${remaining > 1 ?
      "s" : ""}`);
  }
  return output;
}

// Resolves the fallback name used in a value's display prefix when its
// constructor is null, querying the runtime for the real constructor name.
function getCtxStyle(value, constructor, tag) {
  let fallback = "";
  if (constructor === null) {
    fallback = core.ops.op_get_constructor_name(value);
    if (fallback === tag) {
      fallback = "Object";
    }
  }
  return getPrefix(constructor, tag, fallback);
}

// Look up the keys of the object.
// With `showHidden`, returns all own property names plus all own symbols;
// otherwise only enumerable keys and enumerable symbols.
function getKeys(value, showHidden) {
  let keys;
  const symbols = ObjectGetOwnPropertySymbols(value);
  if (showHidden) {
    keys = ObjectGetOwnPropertyNames(value);
    if (symbols.length !== 0) {
      ArrayPrototypePushApply(keys, symbols);
    }
  } else {
    // This might throw if `value` is a Module Namespace Object from an
    // unevaluated module, but we don't want to perform the actual type
    // check because it's expensive.
    // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209
    // and modify this logic as needed.
    try {
      keys = ObjectKeys(value);
    } catch (err) {
      // Only the unevaluated-module case is tolerated; anything else is a bug.
      assert(
        isNativeError(err) && err.name === "ReferenceError" &&
          isModuleNamespaceObject(value),
      );
      keys = ObjectGetOwnPropertyNames(value);
    }
    if (symbols.length !== 0) {
      // Keep only enumerable symbols to mirror ObjectKeys semantics.
      const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key);
      ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter));
    }
  }
  return keys;
}

// Formats a Set's values, up to ctx.iterableLimit entries, with a
// `... n more items` marker for the remainder. Indents two spaces while
// formatting nested values.
function formatSet(value, ctx, _ignored, recurseTimes) {
  ctx.indentationLvl += 2;

  const values = [...new SafeSetIterator(value)];
  const valLen = SetPrototypeGetSize(value);
  const len = MathMin(MathMax(0, ctx.iterableLimit), valLen);

  const remaining = valLen - len;
  const output = [];
  for (let i = 0; i < len; i++) {
    output.push(formatValue(ctx, values[i], recurseTimes));
  }
  if (remaining > 0) {
    output.push(`... ${remaining} more item${remaining > 1 ?
"s" : ""}`); + } + + ctx.indentationLvl -= 2; + return output; +} + +function formatMap(value, ctx, _gnored, recurseTimes) { + ctx.indentationLvl += 2; + + const values = [...new SafeMapIterator(value)]; + const valLen = MapPrototypeGetSize(value); + const len = MathMin(MathMax(0, ctx.iterableLimit), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + output.push( + `${formatValue(ctx, values[i][0], recurseTimes)} => ${ + formatValue(ctx, values[i][1], recurseTimes) + }`, + ); + } + if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + } + + ctx.indentationLvl -= 2; + return output; +} + +function formatTypedArray( + value, + length, + ctx, + _ignored, + recurseTimes, +) { + const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); + const remaining = value.length - maxLength; + const output = new Array(maxLength); + const elementFormatter = value.length > 0 && typeof value[0] === "number" + ? formatNumber + : formatBigInt; + for (let i = 0; i < maxLength; ++i) { + output[i] = elementFormatter(ctx.stylize, value[i]); + } + if (remaining > 0) { + output[maxLength] = `... ${remaining} more item${remaining > 1 ? "s" : ""}`; + } + if (ctx.showHidden) { + // .buffer goes last, it's not a primitive like the others. + // All besides `BYTES_PER_ELEMENT` are actually getters. 
    ctx.indentationLvl += 2;
    // With showHidden, also print the typed array's non-index metadata.
    // All besides BYTES_PER_ELEMENT are getters on the prototype.
    for (
      const key of new SafeArrayIterator([
        "BYTES_PER_ELEMENT",
        "length",
        "byteLength",
        "byteOffset",
        "buffer",
      ])
    ) {
      const str = formatValue(ctx, value[key], recurseTimes, true);
      Array.prototype.push.call(output, `[${key}]: ${str}`);
    }
    ctx.indentationLvl -= 2;
  }
  return output;
}

// Derives the opening/closing braces for a Map/Set iterator display,
// e.g. `[Map Iterator] {` ... `}`, merging any extra Symbol.toStringTag.
function getIteratorBraces(type, tag) {
  if (tag !== `${type} Iterator`) {
    if (tag !== "") {
      tag += "] [";
    }
    tag += `${type} Iterator`;
  }
  return [`[${tag}] {`, "}"];
}

const iteratorRegExp = new SafeRegExp(" Iterator] {$");
// Formats a Map/Set iterator. `value` is destructured as an
// `[entries, isKeyValue]` pair — see the previewEntries TODO below.
function formatIterator(braces, ctx, value, recurseTimes) {
  // TODO(wafuwafu13): Implement
  // const { 0: entries, 1: isKeyValue } = previewEntries(value, true);
  const { 0: entries, 1: isKeyValue } = value;
  if (isKeyValue) {
    // Mark entry iterators as such.
    braces[0] = braces[0].replace(iteratorRegExp, " Entries] {");
    return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries);
  }

  return formatSetIterInner(ctx, recurseTimes, entries, kIterator);
}

// Mints (or reuses) a per-inspect id for a circular value and renders the
// circular marker. Same id scheme as the circular check in formatValue().
function handleCircular(value, ctx) {
  let index = 1;
  if (ctx.circular === undefined) {
    ctx.circular = new SafeMap();
    MapPrototypeSet(ctx.circular, value, index);
  } else {
    index = MapPrototypeGet(ctx.circular, value);
    if (index === undefined) {
      index = MapPrototypeGetSize(ctx.circular) + 1;
      MapPrototypeSet(ctx.circular, value, index);
    }
  }
  // Circular string is cyan
  return ctx.stylize(`[Circular *${index}]`, "special");
}

// Matches the first stack line that is an `at ...` frame.
const AGGREGATE_ERROR_HAS_AT_PATTERN = new SafeRegExp(/\s+at/);
// Matches the start of every non-empty line (used to indent nested errors).
const AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN = new SafeRegExp(/^(?!\s*$)/gm);

// Renders an Error to a string, including AggregateError member errors and
// the chain of `cause` values (with circular-cause protection).
function inspectError(value, ctx) {
  const causes = [value];

  let err = value;
  // Walk the `cause` chain; a repeated cause is replaced by its circular
  // marker and the walk stops.
  while (err.cause) {
    if (ArrayPrototypeIncludes(causes, err.cause)) {
      ArrayPrototypePush(causes, handleCircular(err.cause, ctx));
      break;
    } else {
      ArrayPrototypePush(causes, err.cause);
      err = err.cause;
    }
  }

  const refMap = new
SafeMap(); + for (let i = 0; i < causes.length; ++i) { + const cause = causes[i]; + if (ctx.circular !== undefined) { + const index = MapPrototypeGet(ctx.circular, cause); + if (index !== undefined) { + MapPrototypeSet( + refMap, + cause, + ctx.stylize(` `, "special"), + ); + } + } + } + ArrayPrototypeShift(causes); + + let finalMessage = MapPrototypeGet(refMap, value) ?? ""; + + if (ObjectPrototypeIsPrototypeOf(AggregateErrorPrototype, value)) { + const stackLines = StringPrototypeSplit(value.stack, "\n"); + while (true) { + const line = ArrayPrototypeShift(stackLines); + if (RegExpPrototypeTest(AGGREGATE_ERROR_HAS_AT_PATTERN, line)) { + ArrayPrototypeUnshift(stackLines, line); + break; + } else if (typeof line === "undefined") { + break; + } + + finalMessage += line; + finalMessage += "\n"; + } + const aggregateMessage = ArrayPrototypeJoin( + ArrayPrototypeMap( + value.errors, + (error) => + StringPrototypeReplace( + inspectArgs([error]), + AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN, + StringPrototypeRepeat(" ", 4), + ), + ), + "\n", + ); + finalMessage += aggregateMessage; + finalMessage += "\n"; + finalMessage += ArrayPrototypeJoin(stackLines, "\n"); + } else { + const stack = value.stack; + if (stack?.includes("\n at")) { + finalMessage += stack; + } else { + finalMessage += `[${stack || value.toString()}]`; + } + } + finalMessage += ArrayPrototypeJoin( + ArrayPrototypeMap( + causes, + (cause) => + "\nCaused by " + (MapPrototypeGet(refMap, cause) ?? "") + + (cause?.stack ?? 
          cause),
    ),
    "",
  );

  return finalMessage;
}

// 256-entry lookup table mapping a byte value to its two-char lowercase hex
// representation. Built once at module load.
const hexSliceLookupTable = function () {
  const alphabet = "0123456789abcdef";
  const table = new Array(256);
  for (let i = 0; i < 16; ++i) {
    const i16 = i * 16;
    for (let j = 0; j < 16; ++j) {
      table[i16 + j] = alphabet[i] + alphabet[j];
    }
  }
  return table;
}();

// Hex-encodes buf[start..end) using the lookup table. Out-of-range or
// missing bounds are clamped to [0, buf.length].
function hexSlice(buf, start, end) {
  const len = buf.length;
  if (!start || start < 0) {
    start = 0;
  }
  if (!end || end < 0 || end > len) {
    end = len;
  }
  let out = "";
  for (let i = start; i < end; ++i) {
    out += hexSliceLookupTable[buf[i]];
  }
  return out;
}

const arrayBufferRegExp = new SafeRegExp("(.{2})", "g");
// Formats an ArrayBuffer/SharedArrayBuffer's contents as space-separated hex
// byte pairs, up to ctx.maxArrayLength bytes. A detached buffer (Uint8Array
// construction throws) renders as "(detached)".
function formatArrayBuffer(ctx, value) {
  let buffer;
  try {
    buffer = new Uint8Array(value);
  } catch {
    return [ctx.stylize("(detached)", "special")];
  }
  let str = hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length))
    .replace(arrayBufferRegExp, "$1 ").trim();

  const remaining = buffer.length - ctx.maxArrayLength;
  if (remaining > 0) {
    str += ` ... ${remaining} more byte${remaining > 1 ? "s" : ""}`;
  }
  return [`${ctx.stylize("[Uint8Contents]", "special")}: <${str}>`];
}

function formatNumber(fn, value) {
  // Format -0 as '-0'. Checking `value === -0` won't distinguish 0 from -0.
  return fn(ObjectIs(value, -0) ? "-0" : `${value}`, "number");
}

// Promise state discriminants as returned by core.getPromiseDetails().
const PromiseState = {
  Pending: 0,
  Fulfilled: 1,
  Rejected: 2,
};

// Formats a Promise: a pending marker, or the settled value (prefixed with a
// rejected marker when rejected).
function formatPromise(ctx, value, recurseTimes) {
  let output;
  // TODO(wafuwafu13): Implement
  const { 0: state, 1: result } = core.getPromiseDetails(value);
  if (state === PromiseState.Pending) {
    // NOTE(review): this literal appears stripped by extraction — upstream
    // renders a "<pending>" marker here; confirm against the real source.
    output = [ctx.stylize("", "special")];
  } else {
    ctx.indentationLvl += 2;
    const str = formatValue(ctx, result, recurseTimes);
    ctx.indentationLvl -= 2;
    output = [
      state === PromiseState.Rejected
        ?
`${ctx.stylize("", "special")} ${str}` + : str, + ]; + } + return output; +} + +function formatWeakCollection(ctx) { + return [ctx.stylize("", "special")]; +} + +function formatWeakSet(ctx, value, recurseTimes) { + // TODO(wafuwafu13): Implement + // const entries = previewEntries(value); + const entries = value; + return formatSetIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatWeakMap(ctx, value, recurseTimes) { + // TODO(wafuwafu13): Implement + // const entries = previewEntries(value); + const entries = value; + return formatMapIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatProperty( + ctx, + value, + recurseTimes, + key, + type, + desc, + original = value, +) { + let name, str; + let extra = " "; + desc = desc || ObjectGetOwnPropertyDescriptor(value, key) || + { value: value[key], enumerable: true }; + if (desc.value !== undefined) { + const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; + ctx.indentationLvl += diff; + str = formatValue(ctx, desc.value, recurseTimes); + if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { + extra = `\n${" ".repeat(ctx.indentationLvl)}`; + } + ctx.indentationLvl -= diff; + } else if (desc.get !== undefined) { + const label = desc.set !== undefined ? 
"Getter/Setter" : "Getter"; + const s = ctx.stylize; + const sp = "special"; + if ( + ctx.getters && (ctx.getters === true || + (ctx.getters === "get" && desc.set === undefined) || + (ctx.getters === "set" && desc.set !== undefined)) + ) { + try { + const tmp = desc.get.call(original); + ctx.indentationLvl += 2; + if (tmp === null) { + str = `${s(`[${label}:`, sp)} ${s("null", "null")}${s("]", sp)}`; + } else if (typeof tmp === "object") { + str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; + } else { + const primitive = formatPrimitive(s, tmp, ctx); + str = `${s(`[${label}:`, sp)} ${primitive}${s("]", sp)}`; + } + ctx.indentationLvl -= 2; + } catch (err) { + const message = ``; + str = `${s(`[${label}:`, sp)} ${message}${s("]", sp)}`; + } + } else { + str = ctx.stylize(`[${label}]`, sp); + } + } else if (desc.set !== undefined) { + str = ctx.stylize("[Setter]", "special"); + } else { + str = ctx.stylize("undefined", "undefined"); + } + if (type === kArrayType) { + return str; + } + if (typeof key === "symbol") { + name = `[${ctx.stylize(maybeQuoteSymbol(key, ctx), "symbol")}]`; + } else if (key === "__proto__") { + name = "['__proto__']"; + } else if (desc.enumerable === false) { + const tmp = key.replace(strEscapeSequencesReplacer, escapeFn); + + name = `[${tmp}]`; + } else if (keyStrRegExp.test(key)) { + name = ctx.stylize(key, "name"); + } else { + name = ctx.stylize(quoteString(key, ctx), "string"); + } + return `${name}:${extra}${str}`; +} + +function handleMaxCallStackSize( + _ctx, + _err, + _constructorName, + _indentationLvl, +) { + // TODO(wafuwafu13): Implement + // if (isStackOverflowError(err)) { + // ctx.seen.pop(); + // ctx.indentationLvl = indentationLvl; + // return ctx.stylize( + // `[${constructorName}: Inspection interrupted ` + + // 'prematurely. 
Maximum call stack size exceeded.]', + // 'special' + // ); + // } + // /* c8 ignore next */ + // assert.fail(err.stack); +} + +const colorRegExp = new SafeRegExp("\u001b\\[\\d\\d?m", "g"); +function removeColors(str) { + return str.replace(colorRegExp, ""); +} + +function isBelowBreakLength(ctx, output, start, base) { + // Each entry is separated by at least a comma. Thus, we start with a total + // length of at least `output.length`. In addition, some cases have a + // whitespace in-between each other that is added to the total as well. + // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth + // function. Check the performance overhead and make it an opt-in in case it's + // significant. + let totalLength = output.length + start; + if (totalLength + output.length > ctx.breakLength) { + return false; + } + for (let i = 0; i < output.length; i++) { + if (ctx.colors) { + totalLength += removeColors(output[i]).length; + } else { + totalLength += output[i].length; + } + if (totalLength > ctx.breakLength) { + return false; + } + } + // Do not line up properties on the same line if `base` contains line breaks. + return base === "" || !StringPrototypeIncludes(base, "\n"); +} + +function formatBigInt(fn, value) { + return fn(`${value}n`, "bigint"); +} + +function formatNamespaceObject( + keys, + ctx, + value, + recurseTimes, +) { + const output = new Array(keys.length); + for (let i = 0; i < keys.length; i++) { + try { + output[i] = formatProperty( + ctx, + value, + recurseTimes, + keys[i], + kObjectType, + ); + } catch (_err) { + // TODO(wafuwfu13): Implement + // assert(isNativeError(err) && err.name === 'ReferenceError'); + // Use the existing functionality. This makes sure the indentation and + // line breaks are always correct. Otherwise it is very difficult to keep + // this aligned, even though this is a hacky way of dealing with this. 
+ const tmp = { [keys[i]]: "" }; + output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); + const pos = output[i].lastIndexOf(" "); + // We have to find the last whitespace and have to replace that value as + // it will be visualized as a regular string. + output[i] = output[i].slice(0, pos + 1) + + ctx.stylize("", "special"); + } + } + // Reset the keys to an empty array. This prevents duplicated inspection. + keys.length = 0; + return output; +} + +// The array is sparse and/or has extra keys +function formatSpecialArray( + ctx, + value, + recurseTimes, + maxLength, + output, + i, +) { + const keys = ObjectKeys(value); + let index = i; + for (; i < keys.length && output.length < maxLength; i++) { + const key = keys[i]; + const tmp = +key; + // Arrays can only have up to 2^32 - 1 entries + if (tmp > 2 ** 32 - 2) { + break; + } + if (`${index}` !== key) { + if (!numberRegExp.test(key)) { + break; + } + const emptyItems = tmp - index; + const ending = emptyItems > 1 ? "s" : ""; + const message = `<${emptyItems} empty item${ending}>`; + output.push(ctx.stylize(message, "undefined")); + index = tmp; + if (output.length === maxLength) { + break; + } + } + output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); + index++; + } + const remaining = value.length - index; + if (output.length !== maxLength) { + if (remaining > 0) { + const ending = remaining > 1 ? "s" : ""; + const message = `<${remaining} empty item${ending}>`; + output.push(ctx.stylize(message, "undefined")); + } + } else if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? 
"s" : ""}`); + } + return output; +} + +function getBoxedBase( + value, + ctx, + keys, + constructor, + tag, +) { + let type; + if (isNumberObject(value)) { + type = "Number"; + } else if (isStringObject(value)) { + type = "String"; + // For boxed Strings, we have to remove the 0-n indexed entries, + // since they just noisy up the output and are redundant + // Make boxed primitive Strings look like such + keys.splice(0, value.length); + } else if (isBooleanObject(value)) { + type = "Boolean"; + } else if (isBigIntObject(value)) { + type = "BigInt"; + } else { + type = "Symbol"; + } + let base = `[${type}`; + if (type !== constructor) { + if (constructor === null) { + base += " (null prototype)"; + } else { + base += ` (${constructor})`; + } + } + + base += `: ${formatPrimitive(stylizeNoColor, value.valueOf(), ctx)}]`; + if (tag !== "" && tag !== constructor) { + base += ` [${tag}]`; + } + if (keys.length !== 0 || ctx.stylize === stylizeNoColor) { + return base; + } + return ctx.stylize(base, type.toLowerCase()); +} + +function reduceToSingleString( + ctx, + output, + base, + braces, + extrasType, + recurseTimes, + value, +) { + if (ctx.compact !== true) { + if (typeof ctx.compact === "number" && ctx.compact >= 1) { + // Memorize the original output length. In case the output is grouped, + // prevent lining up the entries on a single line. + const entries = output.length; + // Group array elements together if the array contains at least six + // separate entries. + if (extrasType === kArrayExtrasType && entries > 6) { + output = groupArrayElements(ctx, output, value); + } + // `ctx.currentDepth` is set to the most inner depth of the currently + // inspected object part while `recurseTimes` is the actual current depth + // that is inspected. + // + // Example: + // + // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } + // + // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max + // depth of 1. 
+ // + // Consolidate all entries of the local most inner depth up to + // `ctx.compact`, as long as the properties are smaller than + // `ctx.breakLength`. + if ( + ctx.currentDepth - recurseTimes < ctx.compact && + entries === output.length + ) { + // Line up all entries on a single line in case the entries do not + // exceed `breakLength`. Add 10 as constant to start next to all other + // factors that may reduce `breakLength`. + const start = output.length + ctx.indentationLvl + + braces[0].length + base.length + 10; + if (isBelowBreakLength(ctx, output, start, base)) { + const joinedOutput = ArrayPrototypeJoin(output, ", "); + if (!StringPrototypeIncludes(joinedOutput, "\n")) { + return `${base ? `${base} ` : ""}${braces[0]} ${joinedOutput}` + + ` ${braces[1]}`; + } + } + } + } + // Line up each entry on an individual line. + const indentation = `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`; + return `${base ? `${base} ` : ""}${braces[0]}${indentation} ` + + `${ArrayPrototypeJoin(output, `,${indentation} `)}${ + ctx.trailingComma ? "," : "" + }${indentation}${braces[1]}`; + } + // Line up all entries on a single line in case the entries do not exceed + // `breakLength`. + if (isBelowBreakLength(ctx, output, 0, base)) { + return `${braces[0]}${base ? ` ${base}` : ""} ${ + ArrayPrototypeJoin(output, ", ") + } ` + + braces[1]; + } + const indentation = StringPrototypeRepeat(" ", ctx.indentationLvl); + // If the opening "brace" is too large, like in the case of "Set {", + // we need to force the first item to be on the next line or the + // items will not line up correctly. + const ln = base === "" && braces[0].length === 1 + ? " " + : `${base ? ` ${base}` : ""}\n${indentation} `; + // Line up each entry on an individual line. 
+ return `${braces[0]}${ln}${ + ArrayPrototypeJoin(output, `,\n${indentation} `) + } ${braces[1]}`; +} + +function groupArrayElements(ctx, output, value) { + let totalLength = 0; + let maxLength = 0; + let i = 0; + let outputLength = output.length; + if (ctx.maxArrayLength < output.length) { + // This makes sure the "... n more items" part is not taken into account. + outputLength--; + } + const separatorSpace = 2; // Add 1 for the space and 1 for the separator. + const dataLen = new Array(outputLength); + // Calculate the total length of all output entries and the individual max + // entries length of all output entries. We have to remove colors first, + // otherwise the length would not be calculated properly. + for (; i < outputLength; i++) { + const len = getStringWidth(output[i], ctx.colors); + dataLen[i] = len; + totalLength += len + separatorSpace; + if (maxLength < len) { + maxLength = len; + } + } + // Add two to `maxLength` as we add a single whitespace character plus a comma + // in-between two entries. + const actualMax = maxLength + separatorSpace; + // Check if at least three entries fit next to each other and prevent grouping + // of arrays that contains entries of very different length (i.e., if a single + // entry is longer than 1/5 of all other entries combined). Otherwise the + // space in-between small entries would be enormous. + if ( + actualMax * 3 + ctx.indentationLvl < ctx.breakLength && + (totalLength / actualMax > 5 || maxLength <= 6) + ) { + const approxCharHeights = 2.5; + const averageBias = MathSqrt(actualMax - totalLength / output.length); + const biasedMax = MathMax(actualMax - 3 - averageBias, 1); + // Dynamically check how many columns seem possible. + const columns = MathMin( + // Ideally a square should be drawn. We expect a character to be about 2.5 + // times as high as wide. This is the area formula to calculate a square + // which contains n rectangles of size `actualMax * approxCharHeights`. 
+ // Divide that by `actualMax` to receive the correct number of columns. + // The added bias increases the columns for short entries. + MathRound( + MathSqrt( + approxCharHeights * biasedMax * outputLength, + ) / biasedMax, + ), + // Do not exceed the breakLength. + MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), + // Limit array grouping for small `compact` modes as the user requested + // minimal grouping. + ctx.compact * 4, + // Limit the columns to a maximum of fifteen. + 15, + ); + // Return with the original output if no grouping should happen. + if (columns <= 1) { + return output; + } + const tmp = []; + const maxLineLength = []; + for (let i = 0; i < columns; i++) { + let lineMaxLength = 0; + for (let j = i; j < output.length; j += columns) { + if (dataLen[j] > lineMaxLength) { + lineMaxLength = dataLen[j]; + } + } + lineMaxLength += separatorSpace; + maxLineLength[i] = lineMaxLength; + } + let order = StringPrototypePadStart; + if (value !== undefined) { + for (let i = 0; i < output.length; i++) { + if (typeof value[i] !== "number" && typeof value[i] !== "bigint") { + order = StringPrototypePadEnd; + break; + } + } + } + // Each iteration creates a single line of grouped entries. + for (let i = 0; i < outputLength; i += columns) { + // The last lines may contain less entries than columns. + const max = MathMin(i + columns, outputLength); + let str = ""; + let j = i; + for (; j < max - 1; j++) { + // Calculate extra color padding in case it's active. This has to be + // done line by line as some lines might contain more colors than + // others. 
+ const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; + str += order(`${output[j]}, `, padding, " "); + } + if (order === StringPrototypePadStart) { + const padding = maxLineLength[j - i] + + output[j].length - + dataLen[j] - + separatorSpace; + str += StringPrototypePadStart(output[j], padding, " "); + } else { + str += output[j]; + } + ArrayPrototypePush(tmp, str); + } + if (ctx.maxArrayLength < output.length) { + ArrayPrototypePush(tmp, output[outputLength]); + } + output = tmp; + } + return output; +} + +function formatMapIterInner( + ctx, + recurseTimes, + entries, + state, +) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + // Entries exist as [key1, val1, key2, val2, ...] + const len = entries.length / 2; + const remaining = len - maxArrayLength; + const maxLength = MathMin(maxArrayLength, len); + let output = new Array(maxLength); + let i = 0; + ctx.indentationLvl += 2; + if (state === kWeak) { + for (; i < maxLength; i++) { + const pos = i * 2; + output[i] = `${formatValue(ctx, entries[pos], recurseTimes)} => ${ + formatValue(ctx, entries[pos + 1], recurseTimes) + }`; + } + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. + if (!ctx.sorted) { + output = output.sort(); + } + } else { + for (; i < maxLength; i++) { + const pos = i * 2; + const res = [ + formatValue(ctx, entries[pos], recurseTimes), + formatValue(ctx, entries[pos + 1], recurseTimes), + ]; + output[i] = reduceToSingleString( + ctx, + res, + "", + ["[", "]"], + kArrayExtrasType, + recurseTimes, + ); + } + } + ctx.indentationLvl -= 2; + if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? 
"s" : ""}`); + } + return output; +} + +function formatSetIterInner( + ctx, + recurseTimes, + entries, + state, +) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + const maxLength = MathMin(maxArrayLength, entries.length); + const output = new Array(maxLength); + ctx.indentationLvl += 2; + for (let i = 0; i < maxLength; i++) { + output[i] = formatValue(ctx, entries[i], recurseTimes); + } + ctx.indentationLvl -= 2; + if (state === kWeak && !ctx.sorted) { + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. + output.sort(); + } + const remaining = entries.length - maxLength; + if (remaining > 0) { + Array.prototype.push.call( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); + } + return output; +} + +// Regex used for ansi escape code splitting +// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js +// License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore +// Matches all ansi escape code sequences in a string +const ansiPattern = "[\\u001B\\u009B][[\\]()#;?]*" + + "(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*" + + "|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)" + + "|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))"; +const ansi = new SafeRegExp(ansiPattern, "g"); + +/** + * Returns the number of columns required to display the given string. 
+ */ +export function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) { + str = stripVTControlCharacters(str); + } + str = str.normalize("NFC"); + for (const char of new SafeStringIterator(str)) { + const code = char.codePointAt(0); + if (isFullWidthCodePoint(code)) { + width += 2; + } else if (!isZeroWidthCodePoint(code)) { + width++; + } + } + + return width; +} + +const isZeroWidthCodePoint = (code) => { + return code <= 0x1F || // C0 control codes + (code >= 0x7F && code <= 0x9F) || // C1 control codes + (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks + (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters + // Combining Diacritical Marks for Symbols + (code >= 0x20D0 && code <= 0x20FF) || + (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors + (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks + (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors +}; + +/** + * Remove all VT control characters. Use to estimate displayed string width. + */ +export function stripVTControlCharacters(str) { + return str.replace(ansi, ""); } function hasOwnProperty(obj, v) { @@ -132,24 +2259,9 @@ function hasOwnProperty(obj, v) { return ObjectPrototypeHasOwnProperty(obj, v); } -function propertyIsEnumerable(obj, prop) { - if ( - obj == null || - typeof obj.propertyIsEnumerable !== "function" - ) { - return false; - } - - return ObjectPrototypePropertyIsEnumerable(obj, prop); -} - // Copyright Joyent, Inc. and other Node contributors. MIT license. 
// Forked from Node's lib/internal/cli_table.js -function isTypedArray(x) { - return TypedArrayPrototypeGetSymbolToStringTag(x) !== undefined; -} - const tableChars = { middleMiddle: "\u2500", rowMiddle: "\u253c", @@ -205,17 +2317,6 @@ function isFullWidthCodePoint(code) { ); } -function getStringWidth(str) { - str = StringPrototypeNormalize(colors.stripColor(str), "NFC"); - let width = 0; - - for (const ch of new SafeStringIterator(str)) { - width += isFullWidthCodePoint(StringPrototypeCodePointAt(ch, 0)) ? 2 : 1; - } - - return width; -} - function renderRow(row, columnWidths, columnRightAlign) { let out = tableChars.left; for (let i = 0; i < row.length; i++) { @@ -292,454 +2393,72 @@ function cliTable(head, columns) { // We can match Node's quoting behavior exactly by swapping the double quote and // single quote in this array. That would give preference to single quotes. // However, we prefer double quotes as the default. -const QUOTES = ['"', "'", "`"]; -const DEFAULT_INSPECT_OPTIONS = { - depth: 4, - indentLevel: 0, - sorted: false, - trailingComma: false, - compact: true, - iterableLimit: 100, - showProxy: false, - colors: false, - getters: false, +const denoInspectDefaultOptions = { + indentationLvl: 0, + currentDepth: 0, + stylize: stylizeNoColor, + showHidden: false, - strAbbreviateSize: 100, + depth: 4, + colors: false, + showProxy: false, + breakLength: 80, + compact: 3, + sorted: false, + getters: false, + + // node only + maxArrayLength: 100, + maxStringLength: 100, // deno: strAbbreviateSize: 100 + customInspect: true, + + // deno only /** You can override the quotes preference in inspectString. * Used by util.inspect() */ // TODO(kt3k): Consider using symbol as a key to hide this from the public // API. 
- quotes: QUOTES, + quotes: ['"', "'", "`"], + iterableLimit: 100, // similar to node's maxArrayLength, but doesn't only apply to arrays + trailingComma: false, + + inspect, + + // TODO(@crowlKats): merge into indentationLvl + indentLevel: 0, }; +function getDefaultInspectOptions() { + return { + budget: {}, + seen: [], + ...denoInspectDefaultOptions, + }; +} + const DEFAULT_INDENT = " "; // Default indent string -const LINE_BREAKING_LENGTH = 80; -const MIN_GROUP_LENGTH = 6; const STR_ABBREVIATE_SIZE = 100; -const PROMISE_STRING_BASE_LENGTH = 12; - class CSI { static kClear = "\x1b[1;1H"; static kClearScreenDown = "\x1b[0J"; } -function getClassInstanceName(instance) { - if (typeof instance != "object") { - return ""; - } - const constructor = instance?.constructor; - if (typeof constructor == "function") { - return constructor.name ?? ""; - } - return ""; -} +const QUOTE_SYMBOL_REG = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_.0-9]*$/); -function maybeColor(fn, inspectOptions) { - return inspectOptions.colors ? fn : (s) => s; -} +function maybeQuoteSymbol(symbol, ctx) { + const description = SymbolPrototypeGetDescription(symbol); -function inspectFunction(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - if ( - ReflectHas(value, customInspect) && - typeof value[customInspect] === "function" - ) { - return String(value[customInspect](inspect, inspectOptions)); - } - // Might be Function/AsyncFunction/GeneratorFunction/AsyncGeneratorFunction - let cstrName = ObjectGetPrototypeOf(value)?.constructor?.name; - if (!cstrName) { - // If prototype is removed or broken, - // use generic 'Function' instead. 
- cstrName = "Function"; - } - const stringValue = FunctionPrototypeToString(value); - // Might be Class - if (StringPrototypeStartsWith(stringValue, "class")) { - cstrName = "Class"; + if (description === undefined) { + return SymbolPrototypeToString(symbol); } - // Our function may have properties, so we want to format those - // as if our function was an object - // If we didn't find any properties, we will just append an - // empty suffix. - let suffix = ``; - let refStr = ""; - if ( - ObjectKeys(value).length > 0 || - ObjectGetOwnPropertySymbols(value).length > 0 - ) { - const { 0: propString, 1: refIndex } = inspectRawObject( - value, - inspectOptions, - ); - refStr = refIndex; - // Filter out the empty string for the case we only have - // non-enumerable symbols. - if ( - propString.length > 0 && - propString !== "{}" - ) { - suffix = ` ${propString}`; - } + if (RegExpPrototypeTest(QUOTE_SYMBOL_REG, description)) { + return SymbolPrototypeToString(symbol); } - if (value.name && value.name !== "anonymous") { - // from MDN spec - return cyan(`${refStr}[${cstrName}: ${value.name}]`) + suffix; - } - return cyan(`${refStr}[${cstrName} (anonymous)]`) + suffix; -} - -function inspectIterable( - value, - options, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - if (inspectOptions.indentLevel >= inspectOptions.depth) { - return cyan(`[${options.typeName}]`); - } - - const entries = []; - let iter; - let valueIsTypedArray = false; - let entriesLength; - - switch (options.typeName) { - case "Map": - iter = MapPrototypeEntries(value); - entriesLength = MapPrototypeGetSize(value); - break; - case "Set": - iter = SetPrototypeEntries(value); - entriesLength = SetPrototypeGetSize(value); - break; - case "Array": - entriesLength = value.length; - break; - default: - if (isTypedArray(value)) { - entriesLength = TypedArrayPrototypeGetLength(value); - iter = ArrayPrototypeEntries(value); - valueIsTypedArray = true; - } else { - throw new 
TypeError("unreachable"); - } - } - - let entriesLengthWithoutEmptyItems = entriesLength; - if (options.typeName === "Array") { - for ( - let i = 0, j = 0; - i < entriesLength && j < inspectOptions.iterableLimit; - i++, j++ - ) { - inspectOptions.indentLevel++; - const { entry, skipTo } = options.entryHandler( - [i, value[i]], - inspectOptions, - ); - ArrayPrototypePush(entries, entry); - inspectOptions.indentLevel--; - - if (skipTo) { - // subtract skipped (empty) items - entriesLengthWithoutEmptyItems -= skipTo - i; - i = skipTo; - } - } - } else { - let i = 0; - while (true) { - let el; - try { - const res = iter.next(); - if (res.done) { - break; - } - el = res.value; - } catch (err) { - if (valueIsTypedArray) { - // TypedArray.prototype.entries doesn't throw, unless the ArrayBuffer - // is detached. We don't want to show the exception in that case, so - // we catch it here and pretend the ArrayBuffer has no entries (like - // Chrome DevTools does). - break; - } - throw err; - } - if (i < inspectOptions.iterableLimit) { - inspectOptions.indentLevel++; - ArrayPrototypePush( - entries, - options.entryHandler( - el, - inspectOptions, - ), - ); - inspectOptions.indentLevel--; - } else { - break; - } - i++; - } - } - - if (options.sort) { - ArrayPrototypeSort(entries); - } - - if (entriesLengthWithoutEmptyItems > inspectOptions.iterableLimit) { - const nmore = entriesLengthWithoutEmptyItems - - inspectOptions.iterableLimit; - ArrayPrototypePush(entries, `... ${nmore} more items`); - } - - const iPrefix = `${options.displayName ? options.displayName + " " : ""}`; - - const level = inspectOptions.indentLevel; - const initIndentation = `\n${ - StringPrototypeRepeat(DEFAULT_INDENT, level + 1) - }`; - const entryIndentation = `,\n${ - StringPrototypeRepeat(DEFAULT_INDENT, level + 1) - }`; - const closingDelimIndentation = StringPrototypeRepeat( - DEFAULT_INDENT, - level, - ); - const closingIndentation = `${ - inspectOptions.trailingComma ? 
"," : "" - }\n${closingDelimIndentation}`; - - let iContent; - if (entries.length === 0 && !inspectOptions.compact) { - iContent = `\n${closingDelimIndentation}`; - } else if (options.group && entries.length > MIN_GROUP_LENGTH) { - const groups = groupEntries(entries, level, value); - iContent = `${initIndentation}${ - ArrayPrototypeJoin(groups, entryIndentation) - }${closingIndentation}`; - } else { - iContent = entries.length === 0 - ? "" - : ` ${ArrayPrototypeJoin(entries, ", ")} `; - if ( - colors.stripColor(iContent).length > LINE_BREAKING_LENGTH || - !inspectOptions.compact - ) { - iContent = `${initIndentation}${ - ArrayPrototypeJoin(entries, entryIndentation) - }${closingIndentation}`; - } - } - - return `${iPrefix}${options.delims[0]}${iContent}${options.delims[1]}`; -} - -// Ported from Node.js -// Copyright Node.js contributors. All rights reserved. -function groupEntries( - entries, - level, - value, - iterableLimit = 100, -) { - let totalLength = 0; - let maxLength = 0; - let entriesLength = entries.length; - if (iterableLimit < entriesLength) { - // This makes sure the "... n more items" part is not taken into account. - entriesLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(entriesLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. - // IN PROGRESS: Colors are being taken into account. - for (let i = 0; i < entriesLength; i++) { - // Taking colors into account: removing the ANSI color - // codes from the string before measuring its length - const len = colors.stripColor(entries[i]).length; - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) maxLength = len; - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. 
- const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. - if ( - actualMax * 3 + (level + 1) < LINE_BREAKING_LENGTH && - (totalLength / actualMax > 5 || maxLength <= 6) - ) { - const approxCharHeights = 2.5; - const averageBias = MathSqrt(actualMax - totalLength / entries.length); - const biasedMax = MathMax(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = MathMin( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - MathRound( - MathSqrt(approxCharHeights * biasedMax * entriesLength) / biasedMax, - ), - // Do not exceed the breakLength. - MathFloor((LINE_BREAKING_LENGTH - (level + 1)) / actualMax), - // Limit the columns to a maximum of fifteen. - 15, - ); - // Return with the original output if no grouping should happen. 
- if (columns <= 1) { - return entries; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < entries.length; j += columns) { - if (dataLen[j] > lineMaxLength) lineMaxLength = dataLen[j]; - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = "padStart"; - if (value !== undefined) { - for (let i = 0; i < entries.length; i++) { - if ( - typeof value[i] !== "number" && - typeof value[i] !== "bigint" - ) { - order = "padEnd"; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < entriesLength; i += columns) { - // The last lines may contain less entries than columns. - const max = MathMin(i + columns, entriesLength); - let str = ""; - let j = i; - for (; j < max - 1; j++) { - const lengthOfColorCodes = entries[j].length - dataLen[j]; - const padding = maxLineLength[j - i] + lengthOfColorCodes; - str += `${entries[j]}, `[order](padding, " "); - } - if (order === "padStart") { - const lengthOfColorCodes = entries[j].length - dataLen[j]; - const padding = maxLineLength[j - i] + - lengthOfColorCodes - - separatorSpace; - str += StringPrototypePadStart(entries[j], padding, " "); - } else { - str += entries[j]; - } - ArrayPrototypePush(tmp, str); - } - if (iterableLimit < entries.length) { - ArrayPrototypePush(tmp, entries[entriesLength]); - } - entries = tmp; - } - return entries; -} - -let circular; -function handleCircular(value, cyan) { - let index = 1; - if (circular === undefined) { - circular = new SafeMap(); - MapPrototypeSet(circular, value, index); - } else { - index = MapPrototypeGet(circular, value); - if (index === undefined) { - index = MapPrototypeGetSize(circular) + 1; - MapPrototypeSet(circular, value, index); - } - } - // Circular string is cyan - return cyan(`[Circular *${index}]`); -} - -function _inspectValue( - value, - inspectOptions, -) { - const proxyDetails = 
core.getProxyDetails(value); - if (proxyDetails != null && inspectOptions.showProxy) { - return inspectProxy(proxyDetails, inspectOptions); - } - - const green = maybeColor(colors.green, inspectOptions); - const yellow = maybeColor(colors.yellow, inspectOptions); - const gray = maybeColor(colors.gray, inspectOptions); - const cyan = maybeColor(colors.cyan, inspectOptions); - const bold = maybeColor(colors.bold, inspectOptions); - const red = maybeColor(colors.red, inspectOptions); - - switch (typeof value) { - case "string": - return green(quoteString(value, inspectOptions)); - case "number": // Numbers are yellow - // Special handling of -0 - return yellow(ObjectIs(value, -0) ? "-0" : `${value}`); - case "boolean": // booleans are yellow - return yellow(String(value)); - case "undefined": // undefined is gray - return gray(String(value)); - case "symbol": // Symbols are green - return green(maybeQuoteSymbol(value, inspectOptions)); - case "bigint": // Bigints are yellow - return yellow(`${value}n`); - case "function": // Function string is cyan - if (ctxHas(value)) { - // Circular string is cyan - return handleCircular(value, cyan); - } - - return inspectFunction(value, inspectOptions); - case "object": // null is bold - if (value === null) { - return bold("null"); - } - - if (ctxHas(value)) { - return handleCircular(value, cyan); - } - - return inspectObject( - value, - inspectOptions, - proxyDetails, - ); - default: - // Not implemented is red - return red("[Not Implemented]"); - } -} - -function inspectValue( - value, - inspectOptions, -) { - ArrayPrototypePush(CTX_STACK, value); - let x; - try { - x = _inspectValue(value, inspectOptions); - } finally { - ArrayPrototypePop(CTX_STACK); - } - return x; + return `Symbol(${quoteString(description, ctx)})`; } /** Surround the string in quotes. @@ -750,11 +2469,12 @@ function inspectValue( * Insert a backslash before any occurrence of the chosen quote symbol and * before any backslash. 
*/ -function quoteString(string, inspectOptions = DEFAULT_INSPECT_OPTIONS) { - const quotes = inspectOptions.quotes; - const quote = - ArrayPrototypeFind(quotes, (c) => !StringPrototypeIncludes(string, c)) ?? - quotes[0]; +function quoteString(string, ctx) { + const quote = ArrayPrototypeFind( + ctx.quotes, + (c) => !StringPrototypeIncludes(string, c), + ) ?? + ctx.quotes[0]; const escapePattern = new SafeRegExp(`(?=[${quote}\\\\])`, "g"); string = StringPrototypeReplace(string, escapePattern, "\\"); string = replaceEscapeSequences(string); @@ -771,8 +2491,7 @@ const ESCAPE_MAP = ObjectFreeze({ "\v": "\\v", }); -// deno-lint-ignore no-control-regex -const ESCAPE_PATTERN2 = new SafeRegExp(/[\x00-\x1f\x7f-\x9f]/g); +const ESCAPE_PATTERN2 = new SafeRegExp("[\x00-\x1f\x7f-\x9f]", "g"); // Replace escape sequences that can modify output. function replaceEscapeSequences(string) { @@ -793,615 +2512,23 @@ function replaceEscapeSequences(string) { ); } -const QUOTE_STRING_PATTERN = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_0-9]*$/); - -// Surround a string with quotes when it is required (e.g the string not a valid identifier). -function maybeQuoteString(string, inspectOptions) { - if ( - RegExpPrototypeTest(QUOTE_STRING_PATTERN, string) - ) { - return replaceEscapeSequences(string); - } - - return quoteString(string, inspectOptions); -} - -const QUOTE_SYMBOL_REG = new SafeRegExp(/^[a-zA-Z_][a-zA-Z_.0-9]*$/); - -// Surround a symbol's description in quotes when it is required (e.g the description has non printable characters). 
-function maybeQuoteSymbol(symbol, inspectOptions) { - const description = SymbolPrototypeGetDescription(symbol); - - if (description === undefined) { - return SymbolPrototypeToString(symbol); - } - - if (RegExpPrototypeTest(QUOTE_SYMBOL_REG, description)) { - return SymbolPrototypeToString(symbol); - } - - return `Symbol(${quoteString(description, inspectOptions)})`; -} - -const CTX_STACK = []; -function ctxHas(x) { - // Only check parent contexts - return ArrayPrototypeIncludes( - ArrayPrototypeSlice(CTX_STACK, 0, CTX_STACK.length - 1), - x, - ); -} - // Print strings when they are inside of arrays or objects with quotes function inspectValueWithQuotes( value, - inspectOptions, + ctx, ) { - const abbreviateSize = typeof inspectOptions.strAbbreviateSize === "undefined" + const abbreviateSize = typeof ctx.strAbbreviateSize === "undefined" ? STR_ABBREVIATE_SIZE - : inspectOptions.strAbbreviateSize; - const green = maybeColor(colors.green, inspectOptions); + : ctx.strAbbreviateSize; switch (typeof value) { case "string": { const trunc = value.length > abbreviateSize ? StringPrototypeSlice(value, 0, abbreviateSize) + "..." 
: value; - return green(quoteString(trunc, inspectOptions)); // Quoted strings are green + return ctx.stylize(quoteString(trunc, ctx), "string"); // Quoted strings are green } default: - return inspectValue(value, inspectOptions); - } -} - -function inspectArray( - value, - inspectOptions, -) { - const gray = maybeColor(colors.gray, inspectOptions); - let lastValidIndex = 0; - let keys; - const options = { - typeName: "Array", - displayName: "", - delims: ["[", "]"], - entryHandler: (entry, inspectOptions) => { - const { 0: index, 1: val } = entry; - let i = index; - lastValidIndex = index; - if (!ObjectPrototypeHasOwnProperty(value, i)) { - let skipTo; - keys = keys || ObjectKeys(value); - i = value.length; - if (keys.length === 0) { - // fast path, all items are empty - skipTo = i; - } else { - // Not all indexes are empty or there's a non-index property - // Find first non-empty array index - while (keys.length) { - const key = ArrayPrototypeShift(keys); - // check if it's a valid array index - if (key > lastValidIndex && key < 2 ** 32 - 1) { - i = Number(key); - break; - } - } - - skipTo = i - 1; - } - const emptyItems = i - index; - const ending = emptyItems > 1 ? 
"s" : ""; - return { - entry: gray(`<${emptyItems} empty item${ending}>`), - skipTo, - }; - } else { - return { entry: inspectValueWithQuotes(val, inspectOptions) }; - } - }, - group: inspectOptions.compact, - sort: false, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectTypedArray( - typedArrayName, - value, - inspectOptions, -) { - const valueLength = value.length; - const options = { - typeName: typedArrayName, - displayName: `${typedArrayName}(${valueLength})`, - delims: ["[", "]"], - entryHandler: (entry, inspectOptions) => { - const val = entry[1]; - inspectOptions.indentLevel++; - const inspectedValue = inspectValueWithQuotes(val, inspectOptions); - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: inspectOptions.compact, - sort: false, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectSet( - value, - inspectOptions, -) { - const options = { - typeName: "Set", - displayName: "Set", - delims: ["{", "}"], - entryHandler: (entry, inspectOptions) => { - const val = entry[1]; - inspectOptions.indentLevel++; - const inspectedValue = inspectValueWithQuotes(val, inspectOptions); - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: false, - sort: inspectOptions.sorted, - }; - return inspectIterable(value, options, inspectOptions); -} - -function inspectMap( - value, - inspectOptions, -) { - const options = { - typeName: "Map", - displayName: "Map", - delims: ["{", "}"], - entryHandler: (entry, inspectOptions) => { - const { 0: key, 1: val } = entry; - inspectOptions.indentLevel++; - const inspectedValue = `${ - inspectValueWithQuotes(key, inspectOptions) - } => ${inspectValueWithQuotes(val, inspectOptions)}`; - inspectOptions.indentLevel--; - return inspectedValue; - }, - group: false, - sort: inspectOptions.sorted, - }; - return inspectIterable( - value, - options, - inspectOptions, - ); -} - -function inspectWeakSet(inspectOptions) { - const cyan = 
maybeColor(colors.cyan, inspectOptions); - return `WeakSet { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color -} - -function inspectWeakMap(inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return `WeakMap { ${cyan("[items unknown]")} }`; // as seen in Node, with cyan color -} - -function inspectDate(value, inspectOptions) { - // without quotes, ISO format, in magenta like before - const magenta = maybeColor(colors.magenta, inspectOptions); - return magenta( - isInvalidDate(value) ? "Invalid Date" : DatePrototypeToISOString(value), - ); -} - -function inspectRegExp(value, inspectOptions) { - const red = maybeColor(colors.red, inspectOptions); - return red(RegExpPrototypeToString(value)); // RegExps are red -} - -const AGGREGATE_ERROR_HAS_AT_PATTERN = new SafeRegExp(/\s+at/); -const AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN = new SafeRegExp(/^(?!\s*$)/gm); - -function inspectError(value, cyan) { - const causes = [value]; - - let err = value; - while (err.cause) { - if (ArrayPrototypeIncludes(causes, err.cause)) { - ArrayPrototypePush(causes, handleCircular(err.cause, cyan)); - break; - } else { - ArrayPrototypePush(causes, err.cause); - err = err.cause; - } - } - - const refMap = new SafeMap(); - for (let i = 0; i < causes.length; ++i) { - const cause = causes[i]; - if (circular !== undefined) { - const index = MapPrototypeGet(circular, cause); - if (index !== undefined) { - MapPrototypeSet(refMap, cause, cyan(` `)); - } - } - } - ArrayPrototypeShift(causes); - - let finalMessage = MapPrototypeGet(refMap, value) ?? 
""; - - if (ObjectPrototypeIsPrototypeOf(AggregateErrorPrototype, value)) { - const stackLines = StringPrototypeSplit(value.stack, "\n"); - while (true) { - const line = ArrayPrototypeShift(stackLines); - if (RegExpPrototypeTest(AGGREGATE_ERROR_HAS_AT_PATTERN, line)) { - ArrayPrototypeUnshift(stackLines, line); - break; - } else if (typeof line === "undefined") { - break; - } - - finalMessage += line; - finalMessage += "\n"; - } - const aggregateMessage = ArrayPrototypeJoin( - ArrayPrototypeMap( - value.errors, - (error) => - StringPrototypeReplace( - inspectArgs([error]), - AGGREGATE_ERROR_NOT_EMPTY_LINE_PATTERN, - StringPrototypeRepeat(" ", 4), - ), - ), - "\n", - ); - finalMessage += aggregateMessage; - finalMessage += "\n"; - finalMessage += ArrayPrototypeJoin(stackLines, "\n"); - } else { - finalMessage += value.stack; - } - - finalMessage += ArrayPrototypeJoin( - ArrayPrototypeMap( - causes, - (cause) => - "\nCaused by " + (MapPrototypeGet(refMap, cause) ?? "") + - (cause?.stack ?? cause), - ), - "", - ); - - return finalMessage; -} - -function inspectStringObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[String: "${StringPrototypeToString(value)}"]`); // wrappers are in cyan -} - -function inspectBooleanObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[Boolean: ${BooleanPrototypeToString(value)}]`); // wrappers are in cyan -} - -function inspectNumberObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - // Special handling of -0 - return cyan( - `[Number: ${ - ObjectIs(NumberPrototypeValueOf(value), -0) - ? 
"-0" - : NumberPrototypeToString(value) - }]`, - ); // wrappers are in cyan -} - -function inspectBigIntObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan(`[BigInt: ${BigIntPrototypeToString(value)}n]`); // wrappers are in cyan -} - -function inspectSymbolObject(value, inspectOptions) { - const cyan = maybeColor(colors.cyan, inspectOptions); - return cyan( - `[Symbol: ${ - maybeQuoteSymbol(SymbolPrototypeValueOf(value), inspectOptions) - }]`, - ); // wrappers are in cyan -} - -const PromiseState = { - Pending: 0, - Fulfilled: 1, - Rejected: 2, -}; - -function inspectPromise( - value, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - const red = maybeColor(colors.red, inspectOptions); - - const { 0: state, 1: result } = core.getPromiseDetails(value); - - if (state === PromiseState.Pending) { - return `Promise { ${cyan("")} }`; - } - - const prefix = state === PromiseState.Fulfilled - ? "" - : `${red("")} `; - - inspectOptions.indentLevel++; - const str = `${prefix}${inspectValueWithQuotes(result, inspectOptions)}`; - inspectOptions.indentLevel--; - - if (str.length + PROMISE_STRING_BASE_LENGTH > LINE_BREAKING_LENGTH) { - return `Promise {\n${ - StringPrototypeRepeat(DEFAULT_INDENT, inspectOptions.indentLevel + 1) - }${str}\n}`; - } - - return `Promise { ${str} }`; -} - -function inspectProxy( - targetAndHandler, - inspectOptions, -) { - return `Proxy ${inspectArray(targetAndHandler, inspectOptions)}`; -} - -function inspectRawObject( - value, - inspectOptions, -) { - const cyan = maybeColor(colors.cyan, inspectOptions); - - if (inspectOptions.indentLevel >= inspectOptions.depth) { - return [cyan("[Object]"), ""]; // wrappers are in cyan - } - - let baseString; - - let shouldShowDisplayName = false; - let displayName = value[ - SymbolToStringTag - ]; - if (!displayName) { - displayName = getClassInstanceName(value); - } - if ( - displayName && displayName !== "Object" && displayName !== 
"anonymous" - ) { - shouldShowDisplayName = true; - } - - const entries = []; - const stringKeys = ObjectKeys(value); - const symbolKeys = ObjectGetOwnPropertySymbols(value); - if (inspectOptions.sorted) { - ArrayPrototypeSort(stringKeys); - ArrayPrototypeSort( - symbolKeys, - (s1, s2) => - StringPrototypeLocaleCompare( - SymbolPrototypeGetDescription(s1) ?? "", - SymbolPrototypeGetDescription(s2) ?? "", - ), - ); - } - - const red = maybeColor(colors.red, inspectOptions); - - inspectOptions.indentLevel++; - - for (let i = 0; i < stringKeys.length; ++i) { - const key = stringKeys[i]; - if (inspectOptions.getters) { - let propertyValue; - let error = null; - try { - propertyValue = value[key]; - } catch (error_) { - error = error_; - } - const inspectedValue = error == null - ? inspectValueWithQuotes(propertyValue, inspectOptions) - : red(`[Thrown ${error.name}: ${error.message}]`); - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: ${inspectedValue}`, - ); - } else { - const descriptor = ObjectGetOwnPropertyDescriptor(value, key); - if (descriptor.get !== undefined && descriptor.set !== undefined) { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: [Getter/Setter]`, - ); - } else if (descriptor.get !== undefined) { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: [Getter]`, - ); - } else { - ArrayPrototypePush( - entries, - `${maybeQuoteString(key, inspectOptions)}: ${ - inspectValueWithQuotes(value[key], inspectOptions) - }`, - ); - } - } - } - - for (let i = 0; i < symbolKeys.length; ++i) { - const key = symbolKeys[i]; - if ( - !inspectOptions.showHidden && - !propertyIsEnumerable(value, key) - ) { - continue; - } - - if (inspectOptions.getters) { - let propertyValue; - let error; - try { - propertyValue = value[key]; - } catch (error_) { - error = error_; - } - const inspectedValue = error == null - ? 
inspectValueWithQuotes(propertyValue, inspectOptions) - : red(`Thrown ${error.name}: ${error.message}`); - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: ${inspectedValue}`, - ); - } else { - const descriptor = ObjectGetOwnPropertyDescriptor(value, key); - if (descriptor.get !== undefined && descriptor.set !== undefined) { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: [Getter/Setter]`, - ); - } else if (descriptor.get !== undefined) { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: [Getter]`, - ); - } else { - ArrayPrototypePush( - entries, - `[${maybeQuoteSymbol(key, inspectOptions)}]: ${ - inspectValueWithQuotes(value[key], inspectOptions) - }`, - ); - } - } - } - - inspectOptions.indentLevel--; - - // Making sure color codes are ignored when calculating the total length - const entriesText = colors.stripColor(ArrayPrototypeJoin(entries, "")); - const totalLength = entries.length + inspectOptions.indentLevel + - entriesText.length; - - if (entries.length === 0) { - baseString = "{}"; - } else if ( - totalLength > LINE_BREAKING_LENGTH || - !inspectOptions.compact || - StringPrototypeIncludes(entriesText, "\n") - ) { - const entryIndent = StringPrototypeRepeat( - DEFAULT_INDENT, - inspectOptions.indentLevel + 1, - ); - const closingIndent = StringPrototypeRepeat( - DEFAULT_INDENT, - inspectOptions.indentLevel, - ); - baseString = `{\n${entryIndent}${ - ArrayPrototypeJoin(entries, `,\n${entryIndent}`) - }${inspectOptions.trailingComma ? 
"," : ""}\n${closingIndent}}`; - } else { - baseString = `{ ${ArrayPrototypeJoin(entries, ", ")} }`; - } - - if (shouldShowDisplayName) { - baseString = `${displayName} ${baseString}`; - } - - let refIndex = ""; - if (circular !== undefined) { - const index = MapPrototypeGet(circular, value); - if (index !== undefined) { - refIndex = cyan(` `); - } - } - - return [baseString, refIndex]; -} - -function inspectObject(value, inspectOptions, proxyDetails) { - if ( - ReflectHas(value, customInspect) && - typeof value[customInspect] === "function" - ) { - return String(value[customInspect](inspect, inspectOptions)); - } - // This non-unique symbol is used to support op_crates, ie. - // in extensions/web we don't want to depend on public - // Symbol.for("Deno.customInspect") symbol defined in the public API. - // Internal only, shouldn't be used by users. - const privateCustomInspect = SymbolFor("Deno.privateCustomInspect"); - if ( - ReflectHas(value, privateCustomInspect) && - typeof value[privateCustomInspect] === "function" - ) { - // TODO(nayeemrmn): `inspect` is passed as an argument because custom - // inspect implementations in `extensions` need it, but may not have access - // to the `Deno` namespace in web workers. Remove when the `Deno` - // namespace is always enabled. 
- return String( - value[privateCustomInspect](inspect, inspectOptions), - ); - } - if (ObjectPrototypeIsPrototypeOf(ErrorPrototype, value)) { - return inspectError(value, maybeColor(colors.cyan, inspectOptions)); - } else if (ArrayIsArray(value)) { - return inspectArray(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(NumberPrototype, value)) { - return inspectNumberObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(BigIntPrototype, value)) { - return inspectBigIntObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(BooleanPrototype, value)) { - return inspectBooleanObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(StringPrototype, value)) { - return inspectStringObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(SymbolPrototype, value)) { - return inspectSymbolObject(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(PromisePrototype, value)) { - return inspectPromise(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(RegExpPrototype, value)) { - return inspectRegExp(value, inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { - return inspectDate( - proxyDetails ? proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(SetPrototype, value)) { - return inspectSet( - proxyDetails ? proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(MapPrototype, value)) { - return inspectMap( - proxyDetails ? 
proxyDetails[0] : value, - inspectOptions, - ); - } else if (ObjectPrototypeIsPrototypeOf(WeakSetPrototype, value)) { - return inspectWeakSet(inspectOptions); - } else if (ObjectPrototypeIsPrototypeOf(WeakMapPrototype, value)) { - return inspectWeakMap(inspectOptions); - } else if (isTypedArray(value)) { - return inspectTypedArray( - ObjectGetPrototypeOf(value).constructor.name, - value, - inspectOptions, - ); - } else { - // Otherwise, default object formatting - let { 0: insp, 1: refIndex } = inspectRawObject(value, inspectOptions); - insp = refIndex + insp; - return insp; + return formatValue(ctx, value, 0); } } @@ -1890,10 +3017,21 @@ function cssToAnsi(css, prevCss = null) { } function inspectArgs(args, inspectOptions = {}) { - circular = undefined; + const ctx = { + ...getDefaultInspectOptions(), + ...inspectOptions, + }; + if (inspectOptions.iterableLimit !== undefined) { + ctx.maxArrayLength = inspectOptions.iterableLimit; + } + if (inspectOptions.strAbbreviateSize !== undefined) { + ctx.maxStringLength = inspectOptions.strAbbreviateSize; + } + if (ctx.colors) ctx.stylize = createStylizeWithColor(styles, colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - const noColor = colors.getNoColor(); - const rInspectOptions = { ...DEFAULT_INSPECT_OPTIONS, ...inspectOptions }; + const noColor = colors_.getNoColor(); const first = args[0]; let a = 0; let string = ""; @@ -1933,7 +3071,7 @@ function inspectArgs(args, inspectOptions = {}) { } } else if (ArrayPrototypeIncludes(["O", "o"], char)) { // Format as an object. - formattedArg = inspectValue(args[a++], rInspectOptions); + formattedArg = formatValue(ctx, args[a++], 0); } else if (char == "c") { const value = args[a++]; if (!noColor) { @@ -1974,14 +3112,14 @@ function inspectArgs(args, inspectOptions = {}) { string += args[a]; } else { // Use default maximum depth for null or undefined arguments. 
- string += inspectValue(args[a], rInspectOptions); + string += formatValue(ctx, args[a], 0); } } - if (rInspectOptions.indentLevel > 0) { + if (ctx.indentLevel > 0) { const groupIndent = StringPrototypeRepeat( DEFAULT_INDENT, - rInspectOptions.indentLevel, + ctx.indentLevel, ); string = groupIndent + StringPrototypeReplaceAll(string, "\n", `\n${groupIndent}`); @@ -1990,14 +3128,29 @@ function inspectArgs(args, inspectOptions = {}) { return string; } +function createStylizeWithColor(styles, colors) { + return function stylizeWithColor(str, styleType) { + const style = styles[styleType]; + if (style !== undefined) { + const color = colors[style]; + if (color !== undefined) { + return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; + } + } + return str; + }; +} + const countMap = new SafeMap(); const timerMap = new SafeMap(); const isConsoleInstance = Symbol("isConsoleInstance"); function getConsoleInspectOptions() { + const color = !colors_.getNoColor(); return { - ...DEFAULT_INSPECT_OPTIONS, - colors: !colors.getNoColor(), + ...getDefaultInspectOptions(), + colors: color, + stylize: color ? 
createStylizeWithColor(styles, colors) : stylizeNoColor, }; } @@ -2146,8 +3299,9 @@ class Console { const stringifyValue = (value) => inspectValueWithQuotes(value, { - ...DEFAULT_INSPECT_OPTIONS, + ...getDefaultInspectOptions(), depth: 1, + compact: true, }); const toTable = (header, body) => this.log(cliTable(header, body)); @@ -2318,18 +3472,34 @@ function inspect( value, inspectOptions = {}, ) { - circular = undefined; - return inspectValue(value, { - ...DEFAULT_INSPECT_OPTIONS, + // Default options + const ctx = { + ...getDefaultInspectOptions(), ...inspectOptions, - }); + }; + if (inspectOptions.iterableLimit !== undefined) { + ctx.maxArrayLength = inspectOptions.iterableLimit; + } + if (inspectOptions.strAbbreviateSize !== undefined) { + ctx.maxStringLength = inspectOptions.strAbbreviateSize; + } + + if (ctx.colors) ctx.stylize = createStylizeWithColor(styles, colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); } /** Creates a proxy that represents a subset of the properties * of the original object optionally without evaluating the properties * in order to get the values. 
*/ function createFilteredInspectProxy({ object, keys, evaluate }) { - return new Proxy({}, { + const obj = class {}; + if (object.constructor?.name) { + ObjectDefineProperty(obj, "name", { value: object.constructor.name }); + } + + return new Proxy(new obj(), { get(_target, key) { if (key === SymbolToStringTag) { return object.constructor?.name; @@ -2417,12 +3587,19 @@ internals.parseCss = parseCss; internals.parseCssColor = parseCssColor; export { + colors, Console, createFilteredInspectProxy, + createStylizeWithColor, CSI, customInspect, + formatBigInt, + formatNumber, + formatValue, + getDefaultInspectOptions, inspect, inspectArgs, quoteString, + styles, wrapConsole, }; diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs index d8409f1988..cdaa3db816 100644 --- a/ext/node/polyfills/internal/util/inspect.mjs +++ b/ext/node/polyfills/internal/util/inspect.mjs @@ -20,215 +20,18 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -import * as types from "ext:deno_node/internal/util/types.ts"; import { validateObject, validateString } from "ext:deno_node/internal/validators.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; +import { createStylizeWithColor, formatValue, formatNumber, formatBigInt, styles, colors } from "ext:deno_console/02_console.js"; -import { - ALL_PROPERTIES, - getOwnNonIndexProperties, - ONLY_ENUMERABLE, -} from "ext:deno_node/internal_binding/util.ts"; -const kObjectType = 0; -const kArrayType = 1; -const kArrayExtrasType = 2; - -const kMinLineLength = 16; - -// Constants to map the iterator state. -const kWeak = 0; -const kIterator = 1; -const kMapEntries = 2; - -const kPending = 0; -const kRejected = 2; - -// Escaped control characters (plus the single quote and the backslash). Use -// empty strings to fill up unused entries. 
-// deno-fmt-ignore -const meta = [ - '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 - '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F - '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 - '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F - '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F - '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F - '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 - '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F - '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 - '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F -]; - -// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot -const isUndetectableObject = (v) => typeof v === "undefined" && v !== undefined; - -// deno-lint-ignore no-control-regex -const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]/; -// deno-lint-ignore no-control-regex -const strEscapeSequencesReplacer = /[\x00-\x1f\x27\x5c\x7f-\x9f]/g; -// deno-lint-ignore no-control-regex -const strEscapeSequencesRegExpSingle = /[\x00-\x1f\x5c\x7f-\x9f]/; -// deno-lint-ignore no-control-regex -const strEscapeSequencesReplacerSingle = /[\x00-\x1f\x5c\x7f-\x9f]/g; - -const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; -const numberRegExp = /^(0|[1-9][0-9]*)$/; -const nodeModulesRegExp = /[/\\]node_modules[/\\](.+?)(?=[/\\])/g; - -const classRegExp = /^(\s+[^(]*?)\s*{/; -// eslint-disable-next-line node-core/no-unescaped-regexp-dot -const stripCommentsRegExp = 
/(\/\/.*?\n)|(\/\*(.|\n)*?\*\/)/g; - -const inspectDefaultOptions = { - showHidden: false, - depth: 2, - colors: false, - customInspect: true, - showProxy: false, - maxArrayLength: 100, - maxStringLength: 10000, - breakLength: 80, - compact: 3, - sorted: false, - getters: false, -}; - -function getUserOptions(ctx, isCrossContext) { - const ret = { - stylize: ctx.stylize, - showHidden: ctx.showHidden, - depth: ctx.depth, - colors: ctx.colors, - customInspect: ctx.customInspect, - showProxy: ctx.showProxy, - maxArrayLength: ctx.maxArrayLength, - maxStringLength: ctx.maxStringLength, - breakLength: ctx.breakLength, - compact: ctx.compact, - sorted: ctx.sorted, - getters: ctx.getters, - ...ctx.userOptions, - }; - - // Typically, the target value will be an instance of `Object`. If that is - // *not* the case, the object may come from another vm.Context, and we want - // to avoid passing it objects from this Context in that case, so we remove - // the prototype from the returned object itself + the `stylize()` function, - // and remove all other non-primitives, including non-primitive user options. - if (isCrossContext) { - Object.setPrototypeOf(ret, null); - for (const key of Object.keys(ret)) { - if ( - (typeof ret[key] === "object" || typeof ret[key] === "function") && - ret[key] !== null - ) { - delete ret[key]; - } - } - ret.stylize = Object.setPrototypeOf((value, flavour) => { - let stylized; - try { - stylized = `${ctx.stylize(value, flavour)}`; - } catch { - // noop - } - - if (typeof stylized !== "string") return value; - // `stylized` is a string as it should be, which is safe to pass along. - return stylized; - }, null); - } - - return ret; -} - -/** - * Echos the value of any input. Tries to print the value out - * in the best way possible given the different types. 
- */ -/* Legacy: value, showHidden, depth, colors */ -export function inspect(value, opts) { - // Default options - const ctx = { - budget: {}, - indentationLvl: 0, - seen: [], - currentDepth: 0, - stylize: stylizeNoColor, - showHidden: inspectDefaultOptions.showHidden, - depth: inspectDefaultOptions.depth, - colors: inspectDefaultOptions.colors, - customInspect: inspectDefaultOptions.customInspect, - showProxy: inspectDefaultOptions.showProxy, - maxArrayLength: inspectDefaultOptions.maxArrayLength, - maxStringLength: inspectDefaultOptions.maxStringLength, - breakLength: inspectDefaultOptions.breakLength, - compact: inspectDefaultOptions.compact, - sorted: inspectDefaultOptions.sorted, - getters: inspectDefaultOptions.getters, - }; - if (arguments.length > 1) { - // Legacy... - if (arguments.length > 2) { - if (arguments[2] !== undefined) { - ctx.depth = arguments[2]; - } - if (arguments.length > 3 && arguments[3] !== undefined) { - ctx.colors = arguments[3]; - } - } - // Set user-specified options - if (typeof opts === "boolean") { - ctx.showHidden = opts; - } else if (opts) { - const optKeys = Object.keys(opts); - for (let i = 0; i < optKeys.length; ++i) { - const key = optKeys[i]; - // TODO(BridgeAR): Find a solution what to do about stylize. Either make - // this function public or add a new API with a similar or better - // functionality. - if ( - // deno-lint-ignore no-prototype-builtins - inspectDefaultOptions.hasOwnProperty(key) || - key === "stylize" - ) { - ctx[key] = opts[key]; - } else if (ctx.userOptions === undefined) { - // This is required to pass through the actual user input. 
- ctx.userOptions = opts; - } - } - } - } - if (ctx.colors) ctx.stylize = stylizeWithColor; - if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; - if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - return formatValue(ctx, value, 0); -} -const customInspectSymbol = Symbol.for("nodejs.util.inspect.custom"); -inspect.custom = customInspectSymbol; - -Object.defineProperty(inspect, "defaultOptions", { - get() { - return inspectDefaultOptions; - }, - set(options) { - validateObject(options, "options"); - return Object.assign(inspectDefaultOptions, options); - }, -}); // Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics // Each color consists of an array with the color code as first entry and the // reset code as second entry. const defaultFG = 39; const defaultBG = 49; -inspect.colors = Object.assign(Object.create(null), { +inspect.colors = { reset: [0, 0], bold: [1, 22], dim: [2, 22], // Alias: faint @@ -274,7 +77,7 @@ inspect.colors = Object.assign(Object.create(null), { bgMagentaBright: [105, defaultBG], bgCyanBright: [106, defaultBG], bgWhiteBright: [107, defaultBG], -}); +}; function defineColorAlias(target, alias) { Object.defineProperty(inspect.colors, alias, { @@ -289,1627 +92,145 @@ function defineColorAlias(target, alias) { }); } -defineColorAlias("gray", "grey"); -defineColorAlias("gray", "blackBright"); -defineColorAlias("bgGray", "bgGrey"); -defineColorAlias("bgGray", "bgBlackBright"); -defineColorAlias("dim", "faint"); -defineColorAlias("strikethrough", "crossedout"); -defineColorAlias("strikethrough", "strikeThrough"); -defineColorAlias("strikethrough", "crossedOut"); -defineColorAlias("hidden", "conceal"); -defineColorAlias("inverse", "swapColors"); -defineColorAlias("inverse", "swapcolors"); -defineColorAlias("doubleunderline", "doubleUnderline"); +defineColorAlias('gray', 'grey'); +defineColorAlias('gray', 'blackBright'); +defineColorAlias('bgGray', 'bgGrey'); +defineColorAlias('bgGray', 
'bgBlackBright'); +defineColorAlias('dim', 'faint'); +defineColorAlias('strikethrough', 'crossedout'); +defineColorAlias('strikethrough', 'strikeThrough'); +defineColorAlias('strikethrough', 'crossedOut'); +defineColorAlias('hidden', 'conceal'); +defineColorAlias('inverse', 'swapColors'); +defineColorAlias('inverse', 'swapcolors'); +defineColorAlias('doubleunderline', 'doubleUnderline'); // TODO(BridgeAR): Add function style support for more complex styles. // Don't use 'blue' not visible on cmd.exe inspect.styles = Object.assign(Object.create(null), { - special: "cyan", - number: "yellow", - bigint: "yellow", - boolean: "yellow", - undefined: "grey", - null: "bold", - string: "green", - symbol: "green", - date: "magenta", + special: 'cyan', + number: 'yellow', + bigint: 'yellow', + boolean: 'yellow', + undefined: 'grey', + null: 'bold', + string: 'green', + symbol: 'green', + date: 'magenta', // "name": intentionally not styling // TODO(BridgeAR): Highlight regular expressions properly. - regexp: "red", - module: "underline", + regexp: 'red', + module: 'underline', }); -function addQuotes(str, quotes) { - if (quotes === -1) { - return `"${str}"`; - } - if (quotes === -2) { - return `\`${str}\``; - } - return `'${str}'`; -} -// TODO(wafuwafu13): Figure out -const escapeFn = (str) => meta[str.charCodeAt(0)]; +const inspectDefaultOptions = { + indentationLvl: 0, + currentDepth: 0, + stylize: stylizeNoColor, -// Escape control characters, single quotes and the backslash. -// This is similar to JSON stringify escaping. -function strEscape(str) { - let escapeTest = strEscapeSequencesRegExp; - let escapeReplace = strEscapeSequencesReplacer; - let singleQuote = 39; + showHidden: false, + depth: 2, + colors: false, + showProxy: false, + breakLength: 80, + compact: 3, + sorted: false, + getters: false, - // Check for double quotes. If not present, do not escape single quotes and - // instead wrap the text in double quotes. If double quotes exist, check for - // backticks. 
If they do not exist, use those as fallback instead of the - // double quotes. - if (str.includes("'")) { - // This invalidates the charCode and therefore can not be matched for - // anymore. - if (!str.includes('"')) { - singleQuote = -1; - } else if ( - !str.includes("`") && - !str.includes("${") - ) { - singleQuote = -2; - } - if (singleQuote !== 39) { - escapeTest = strEscapeSequencesRegExpSingle; - escapeReplace = strEscapeSequencesReplacerSingle; - } - } + // node only + maxArrayLength: 100, + maxStringLength: 10000, // deno: strAbbreviateSize: 100 + customInspect: true, - // Some magic numbers that worked out fine while benchmarking with v8 6.0 - if (str.length < 5000 && !escapeTest.test(str)) { - return addQuotes(str, singleQuote); - } - if (str.length > 100) { - str = str.replace(escapeReplace, escapeFn); - return addQuotes(str, singleQuote); - } + // deno only + /** You can override the quotes preference in inspectString. + * Used by util.inspect() */ + // TODO(kt3k): Consider using symbol as a key to hide this from the public + // API. + quotes: ["'", '"', "`"], + iterableLimit: Infinity, // similar to node's maxArrayLength, but doesn't only apply to arrays + trailingComma: false, - let result = ""; - let last = 0; - const lastIndex = str.length; - for (let i = 0; i < lastIndex; i++) { - const point = str.charCodeAt(i); - if ( - point === singleQuote || - point === 92 || - point < 32 || - (point > 126 && point < 160) - ) { - if (last === i) { - result += meta[point]; - } else { - result += `${str.slice(last, i)}${meta[point]}`; + inspect, + + // TODO(@crowlKats): merge into indentationLvl + indentLevel: 0, +}; + +/** + * Echos the value of any input. Tries to print the value out + * in the best way possible given the different types. 
+ */ +/* Legacy: value, showHidden, depth, colors */ +export function inspect(value, opts) { + // Default options + const ctx = { + budget: {}, + seen: [], + ...inspectDefaultOptions, + }; + if (arguments.length > 1) { + // Legacy... + if (arguments.length > 2) { + if (arguments[2] !== undefined) { + ctx.depth = arguments[2]; + } + if (arguments.length > 3 && arguments[3] !== undefined) { + ctx.colors = arguments[3]; + } + } + // Set user-specified options + if (typeof opts === "boolean") { + ctx.showHidden = opts; + } else if (opts) { + const optKeys = Object.keys(opts); + for (let i = 0; i < optKeys.length; ++i) { + const key = optKeys[i]; + // TODO(BridgeAR): Find a solution what to do about stylize. Either make + // this function public or add a new API with a similar or better + // functionality. + if ( + // deno-lint-ignore no-prototype-builtins + inspectDefaultOptions.hasOwnProperty(key) || + key === "stylize" + ) { + ctx[key] = opts[key]; + } else if (ctx.userOptions === undefined) { + // This is required to pass through the actual user input. 
+ ctx.userOptions = opts; + } } - last = i + 1; } } - - if (last !== lastIndex) { - result += str.slice(last); - } - return addQuotes(result, singleQuote); + if (ctx.colors) ctx.stylize = createStylizeWithColor(inspect.styles, inspect.colors); + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); } +const customInspectSymbol = Symbol.for("nodejs.util.inspect.custom"); +inspect.custom = customInspectSymbol; -function stylizeWithColor(str, styleType) { - const style = inspect.styles[styleType]; - if (style !== undefined) { - const color = inspect.colors[style]; - if (color !== undefined) { - return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; - } - } - return str; -} +Object.defineProperty(inspect, "defaultOptions", { + get() { + return inspectDefaultOptions; + }, + set(options) { + validateObject(options, "options"); + return Object.assign(inspectDefaultOptions, options); + }, +}); function stylizeNoColor(str) { return str; } -// Note: using `formatValue` directly requires the indentation level to be -// corrected by setting `ctx.indentationLvL += diff` and then to decrease the -// value afterwards again. -function formatValue( - ctx, - value, - recurseTimes, - typedArray, -) { - // Primitive types cannot have properties. - if ( - typeof value !== "object" && - typeof value !== "function" && - !isUndetectableObject(value) - ) { - return formatPrimitive(ctx.stylize, value, ctx); - } - if (value === null) { - return ctx.stylize("null", "null"); - } - - // Memorize the context for custom inspection on proxies. - const context = value; - // Always check for proxies to prevent side effects and to prevent triggering - // any proxy handlers. 
- // TODO(wafuwafu13): Set Proxy - const proxy = undefined; - // const proxy = getProxyDetails(value, !!ctx.showProxy); - // if (proxy !== undefined) { - // if (ctx.showProxy) { - // return formatProxy(ctx, proxy, recurseTimes); - // } - // value = proxy; - // } - - // Provide a hook for user-specified inspect functions. - // Check that value is an object with an inspect function on it. - if (ctx.customInspect) { - const maybeCustom = value[customInspectSymbol]; - if ( - typeof maybeCustom === "function" && - // Filter out the util module, its inspect function is special. - maybeCustom !== inspect && - // Also filter out any prototype objects using the circular check. - !(value.constructor && value.constructor.prototype === value) - ) { - // This makes sure the recurseTimes are reported as before while using - // a counter internally. - const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; - const isCrossContext = proxy !== undefined || - !(context instanceof Object); - const ret = maybeCustom.call( - context, - depth, - getUserOptions(ctx, isCrossContext), - ); - // If the custom inspection method returned `this`, don't go into - // infinite recursion. - if (ret !== context) { - if (typeof ret !== "string") { - return formatValue(ctx, ret, recurseTimes); - } - return ret.replace(/\n/g, `\n${" ".repeat(ctx.indentationLvl)}`); - } - } - } - - // Using an array here is actually better for the average case than using - // a Set. `seen` will only check for the depth and will never grow too large. 
- if (ctx.seen.includes(value)) { - let index = 1; - if (ctx.circular === undefined) { - ctx.circular = new Map(); - ctx.circular.set(value, index); - } else { - index = ctx.circular.get(value); - if (index === undefined) { - index = ctx.circular.size + 1; - ctx.circular.set(value, index); - } - } - return ctx.stylize(`[Circular *${index}]`, "special"); - } - - return formatRaw(ctx, value, recurseTimes, typedArray); -} - -function formatRaw(ctx, value, recurseTimes, typedArray) { - let keys; - let protoProps; - if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { - protoProps = []; - } - - const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); - // Reset the variable to check for this later on. - if (protoProps !== undefined && protoProps.length === 0) { - protoProps = undefined; - } - - let tag = value[Symbol.toStringTag]; - // Only list the tag in case it's non-enumerable / not an own property. - // Otherwise we'd print this twice. - if ( - typeof tag !== "string" - // TODO(wafuwafu13): Implement - // (tag !== "" && - // (ctx.showHidden - // ? Object.prototype.hasOwnProperty - // : Object.prototype.propertyIsEnumerable)( - // value, - // Symbol.toStringTag, - // )) - ) { - tag = ""; - } - let base = ""; - let formatter = getEmptyFormatArray; - let braces; - let noIterator = true; - let i = 0; - const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; - - let extrasType = kObjectType; - - // Iterators and the rest are split to reduce checks. - // We have to check all values in case the constructor is set to null. - // Otherwise it would not possible to identify all types properly. - if (value[Symbol.iterator] || constructor === null) { - noIterator = false; - if (Array.isArray(value)) { - // Only set the constructor for non ordinary ("Array [...]") arrays. - const prefix = (constructor !== "Array" || tag !== "") - ? 
getPrefix(constructor, tag, "Array", `(${value.length})`) - : ""; - keys = getOwnNonIndexProperties(value, filter); - braces = [`${prefix}[`, "]"]; - if (value.length === 0 && keys.length === 0 && protoProps === undefined) { - return `${braces[0]}]`; - } - extrasType = kArrayExtrasType; - formatter = formatArray; - } else if (types.isSet(value)) { - const size = value.size; - const prefix = getPrefix(constructor, tag, "Set", `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null - ? formatSet.bind(null, value) - : formatSet.bind(null, value.values()); - if (size === 0 && keys.length === 0 && protoProps === undefined) { - return `${prefix}{}`; - } - braces = [`${prefix}{`, "}"]; - } else if (types.isMap(value)) { - const size = value.size; - const prefix = getPrefix(constructor, tag, "Map", `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null - ? formatMap.bind(null, value) - : formatMap.bind(null, value.entries()); - if (size === 0 && keys.length === 0 && protoProps === undefined) { - return `${prefix}{}`; - } - braces = [`${prefix}{`, "}"]; - } else if (types.isTypedArray(value)) { - keys = getOwnNonIndexProperties(value, filter); - const bound = value; - const fallback = ""; - if (constructor === null) { - // TODO(wafuwafu13): Implement - // fallback = TypedArrayPrototypeGetSymbolToStringTag(value); - // // Reconstruct the array information. - // bound = new primordials[fallback](value); - } - const size = value.length; - const prefix = getPrefix(constructor, tag, fallback, `(${size})`); - braces = [`${prefix}[`, "]"]; - if (value.length === 0 && keys.length === 0 && !ctx.showHidden) { - return `${braces[0]}]`; - } - // Special handle the value. The original value is required below. The - // bound function is required to reconstruct missing information. 
- (formatter) = formatTypedArray.bind(null, bound, size); - extrasType = kArrayExtrasType; - } else if (types.isMapIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces("Map", tag); - // Add braces to the formatter parameters. - (formatter) = formatIterator.bind(null, braces); - } else if (types.isSetIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces("Set", tag); - // Add braces to the formatter parameters. - (formatter) = formatIterator.bind(null, braces); - } else { - noIterator = true; - } - } - if (noIterator) { - keys = getKeys(value, ctx.showHidden); - braces = ["{", "}"]; - if (constructor === "Object") { - if (types.isArgumentsObject(value)) { - braces[0] = "[Arguments] {"; - } else if (tag !== "") { - braces[0] = `${getPrefix(constructor, tag, "Object")}{`; - } - if (keys.length === 0 && protoProps === undefined) { - return `${braces[0]}}`; - } - } else if (typeof value === "function") { - base = getFunctionBase(value, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, "special"); - } - } else if (types.isRegExp(value)) { - // Make RegExps say that they are RegExps - base = RegExp(constructor !== null ? value : new RegExp(value)) - .toString(); - const prefix = getPrefix(constructor, tag, "RegExp"); - if (prefix !== "RegExp ") { - base = `${prefix}${base}`; - } - if ( - (keys.length === 0 && protoProps === undefined) || - (recurseTimes > ctx.depth && ctx.depth !== null) - ) { - return ctx.stylize(base, "regexp"); - } - } else if (types.isDate(value)) { - // Make dates with properties first say the date - base = Number.isNaN(value.getTime()) - ? 
value.toString() - : value.toISOString(); - const prefix = getPrefix(constructor, tag, "Date"); - if (prefix !== "Date ") { - base = `${prefix}${base}`; - } - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, "date"); - } - } else if (value instanceof Error) { - base = formatError(value, constructor, tag, ctx, keys); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else if (types.isAnyArrayBuffer(value)) { - // Fast path for ArrayBuffer and SharedArrayBuffer. - // Can't do the same for DataView because it has a non-primitive - // .buffer property that we need to recurse for. - const arrayType = types.isArrayBuffer(value) - ? "ArrayBuffer" - : "SharedArrayBuffer"; - const prefix = getPrefix(constructor, tag, arrayType); - if (typedArray === undefined) { - (formatter) = formatArrayBuffer; - } else if (keys.length === 0 && protoProps === undefined) { - return prefix + - `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength)} }`; - } - braces[0] = `${prefix}{`; - Array.prototype.unshift.call(keys, "byteLength"); - } else if (types.isDataView(value)) { - braces[0] = `${getPrefix(constructor, tag, "DataView")}{`; - // .buffer goes last, it's not a primitive like the others. - Array.prototype.unshift.call(keys, "byteLength", "byteOffset", "buffer"); - } else if (types.isPromise(value)) { - braces[0] = `${getPrefix(constructor, tag, "Promise")}{`; - (formatter) = formatPromise; - } else if (types.isWeakSet(value)) { - braces[0] = `${getPrefix(constructor, tag, "WeakSet")}{`; - (formatter) = ctx.showHidden ? formatWeakSet : formatWeakCollection; - } else if (types.isWeakMap(value)) { - braces[0] = `${getPrefix(constructor, tag, "WeakMap")}{`; - (formatter) = ctx.showHidden ? formatWeakMap : formatWeakCollection; - } else if (types.isModuleNamespaceObject(value)) { - braces[0] = `${getPrefix(constructor, tag, "Module")}{`; - // Special handle keys for namespace objects. 
- (formatter) = formatNamespaceObject.bind(null, keys); - } else if (types.isBoxedPrimitive(value)) { - base = getBoxedBase(value, ctx, keys, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else { - if (keys.length === 0 && protoProps === undefined) { - // TODO(wafuwafu13): Implement - // if (types.isExternal(value)) { - // const address = getExternalValue(value).toString(16); - // return ctx.stylize(`[External: ${address}]`, 'special'); - // } - return `${getCtxStyle(value, constructor, tag)}{}`; - } - braces[0] = `${getCtxStyle(value, constructor, tag)}{`; - } - } - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - if (constructor !== null) { - constructorName = `[${constructorName}]`; - } - return ctx.stylize(constructorName, "special"); - } - recurseTimes += 1; - - ctx.seen.push(value); - ctx.currentDepth = recurseTimes; - let output; - const indentationLvl = ctx.indentationLvl; - try { - output = formatter(ctx, value, recurseTimes); - for (i = 0; i < keys.length; i++) { - output.push( - formatProperty(ctx, value, recurseTimes, keys[i], extrasType), - ); - } - if (protoProps !== undefined) { - output.push(...protoProps); - } - } catch (err) { - const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); - } - if (ctx.circular !== undefined) { - const index = ctx.circular.get(value); - if (index !== undefined) { - const reference = ctx.stylize(``, "special"); - // Add reference always to the very beginning of the output. - if (ctx.compact !== true) { - base = base === "" ? reference : `${reference} ${base}`; - } else { - braces[0] = `${reference} ${braces[0]}`; - } - } - } - ctx.seen.pop(); - - if (ctx.sorted) { - const comparator = ctx.sorted === true ? 
undefined : ctx.sorted; - if (extrasType === kObjectType) { - output = output.sort(comparator); - } else if (keys.length > 1) { - const sorted = output.slice(output.length - keys.length).sort(comparator); - output.splice(output.length - keys.length, keys.length, ...sorted); - } - } - - const res = reduceToSingleString( - ctx, - output, - base, - braces, - extrasType, - recurseTimes, - value, - ); - const budget = ctx.budget[ctx.indentationLvl] || 0; - const newLength = budget + res.length; - ctx.budget[ctx.indentationLvl] = newLength; - // If any indentationLvl exceeds this limit, limit further inspecting to the - // minimum. Otherwise the recursive algorithm might continue inspecting the - // object even though the maximum string size (~2 ** 28 on 32 bit systems and - // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at - // exactly 2 ** 27 but a bit higher. This depends on the object shape. - // This limit also makes sure that huge objects don't block the event loop - // significantly. - if (newLength > 2 ** 27) { - ctx.depth = -1; - } - return res; -} - const builtInObjects = new Set( Object.getOwnPropertyNames(globalThis).filter((e) => /^[A-Z][a-zA-Z0-9]+$/.test(e) ), ); -function addPrototypeProperties( - ctx, - main, - obj, - recurseTimes, - output, -) { - let depth = 0; - let keys; - let keySet; - do { - if (depth !== 0 || main === obj) { - obj = Object.getPrototypeOf(obj); - // Stop as soon as a null prototype is encountered. - if (obj === null) { - return; - } - // Stop as soon as a built-in object type is detected. - const descriptor = Object.getOwnPropertyDescriptor(obj, "constructor"); - if ( - descriptor !== undefined && - typeof descriptor.value === "function" && - builtInObjects.has(descriptor.value.name) - ) { - return; - } - } - - if (depth === 0) { - keySet = new Set(); - } else { - Array.prototype.forEach.call(keys, (key) => keySet.add(key)); - } - // Get all own property names and symbols. 
- keys = Reflect.ownKeys(obj); - Array.prototype.push.call(ctx.seen, main); - for (const key of keys) { - // Ignore the `constructor` property and keys that exist on layers above. - if ( - key === "constructor" || - // deno-lint-ignore no-prototype-builtins - main.hasOwnProperty(key) || - (depth !== 0 && keySet.has(key)) - ) { - continue; - } - const desc = Object.getOwnPropertyDescriptor(obj, key); - if (typeof desc.value === "function") { - continue; - } - const value = formatProperty( - ctx, - obj, - recurseTimes, - key, - kObjectType, - desc, - main, - ); - if (ctx.colors) { - // Faint! - Array.prototype.push.call(output, `\u001b[2m${value}\u001b[22m`); - } else { - Array.prototype.push.call(output, value); - } - } - Array.prototype.pop.call(ctx.seen); - // Limit the inspection to up to three prototype layers. Using `recurseTimes` - // is not a good choice here, because it's as if the properties are declared - // on the current object from the users perspective. - } while (++depth !== 3); -} - -function getConstructorName( - obj, - ctx, - recurseTimes, - protoProps, -) { - let firstProto; - const tmp = obj; - while (obj || isUndetectableObject(obj)) { - const descriptor = Object.getOwnPropertyDescriptor(obj, "constructor"); - if ( - descriptor !== undefined && - typeof descriptor.value === "function" && - descriptor.value.name !== "" && - isInstanceof(tmp, descriptor.value) - ) { - if ( - protoProps !== undefined && - (firstProto !== obj || - !builtInObjects.has(descriptor.value.name)) - ) { - addPrototypeProperties( - ctx, - tmp, - firstProto || tmp, - recurseTimes, - protoProps, - ); - } - return descriptor.value.name; - } - - obj = Object.getPrototypeOf(obj); - if (firstProto === undefined) { - firstProto = obj; - } - } - - if (firstProto === null) { - return null; - } - - // TODO(wafuwafu13): Implement - // const res = internalGetConstructorName(tmp); - const res = undefined; - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return `${res} `; - } 
- - const protoConstr = getConstructorName( - firstProto, - ctx, - recurseTimes + 1, - protoProps, - ); - - if (protoConstr === null) { - return `${res} <${ - inspect(firstProto, { - ...ctx, - customInspect: false, - depth: -1, - }) - }>`; - } - - return `${res} <${protoConstr}>`; -} - -function formatPrimitive(fn, value, ctx) { - if (typeof value === "string") { - let trailer = ""; - if (value.length > ctx.maxStringLength) { - const remaining = value.length - ctx.maxStringLength; - value = value.slice(0, ctx.maxStringLength); - trailer = `... ${remaining} more character${remaining > 1 ? "s" : ""}`; - } - if ( - ctx.compact !== true && - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. - value.length > kMinLineLength && - value.length > ctx.breakLength - ctx.indentationLvl - 4 - ) { - return value - .split(/(?<=\n)/) - .map((line) => fn(strEscape(line), "string")) - .join(` +\n${" ".repeat(ctx.indentationLvl + 2)}`) + trailer; - } - return fn(strEscape(value), "string") + trailer; - } - if (typeof value === "number") { - return formatNumber(fn, value); - } - if (typeof value === "bigint") { - return formatBigInt(fn, value); - } - if (typeof value === "boolean") { - return fn(`${value}`, "boolean"); - } - if (typeof value === "undefined") { - return fn("undefined", "undefined"); - } - // es6 symbol primitive - return fn(value.toString(), "symbol"); -} - -// Return a new empty array to push in the results of the default formatter. 
-function getEmptyFormatArray() { - return []; -} - -function isInstanceof(object, proto) { - try { - return object instanceof proto; - } catch { - return false; - } -} - -function getPrefix(constructor, tag, fallback, size = "") { - if (constructor === null) { - if (tag !== "" && fallback !== tag) { - return `[${fallback}${size}: null prototype] [${tag}] `; - } - return `[${fallback}${size}: null prototype] `; - } - - if (tag !== "" && constructor !== tag) { - return `${constructor}${size} [${tag}] `; - } - return `${constructor}${size} `; -} - -function formatArray(ctx, value, recurseTimes) { - const valLen = value.length; - const len = Math.min(Math.max(0, ctx.maxArrayLength), valLen); - - const remaining = valLen - len; - const output = []; - for (let i = 0; i < len; i++) { - // Special handle sparse arrays. - // deno-lint-ignore no-prototype-builtins - if (!value.hasOwnProperty(i)) { - return formatSpecialArray(ctx, value, recurseTimes, len, output, i); - } - output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); - } - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); - } - return output; -} - -function getCtxStyle(_value, constructor, tag) { - let fallback = ""; - if (constructor === null) { - // TODO(wafuwafu13): Implement - // fallback = internalGetConstructorName(value); - if (fallback === tag) { - fallback = "Object"; - } - } - return getPrefix(constructor, tag, fallback); -} - -// Look up the keys of the object. -function getKeys(value, showHidden) { - let keys; - const symbols = Object.getOwnPropertySymbols(value); - if (showHidden) { - keys = Object.getOwnPropertyNames(value); - if (symbols.length !== 0) { - Array.prototype.push.apply(keys, symbols); - } - } else { - // This might throw if `value` is a Module Namespace Object from an - // unevaluated module, but we don't want to perform the actual type - // check because it's expensive. 
- // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 - // and modify this logic as needed. - try { - keys = Object.keys(value); - } catch (_err) { - // TODO(wafuwafu13): Implement - // assert(isNativeError(err) && err.name === 'ReferenceError' && - // isModuleNamespaceObject(value)); - keys = Object.getOwnPropertyNames(value); - } - if (symbols.length !== 0) { - // TODO(wafuwafu13): Implement - // const filter = (key: any) => - // - // Object.prototype.propertyIsEnumerable(value, key); - // Array.prototype.push.apply( - // keys, - // symbols.filter(filter), - // ); - } - } - return keys; -} - -function formatSet(value, ctx, _ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const v of value) { - Array.prototype.push.call(output, formatValue(ctx, v, recurseTimes)); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatMap(value, ctx, _gnored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const { 0: k, 1: v } of value) { - output.push( - `${formatValue(ctx, k, recurseTimes)} => ${ - formatValue(ctx, v, recurseTimes) - }`, - ); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatTypedArray( - value, - length, - ctx, - _ignored, - recurseTimes, -) { - const maxLength = Math.min(Math.max(0, ctx.maxArrayLength), length); - const remaining = value.length - maxLength; - const output = new Array(maxLength); - const elementFormatter = value.length > 0 && typeof value[0] === "number" - ? formatNumber - : formatBigInt; - for (let i = 0; i < maxLength; ++i) { - output[i] = elementFormatter(ctx.stylize, value[i]); - } - if (remaining > 0) { - output[maxLength] = `... ${remaining} more item${remaining > 1 ? "s" : ""}`; - } - if (ctx.showHidden) { - // .buffer goes last, it's not a primitive like the others. - // All besides `BYTES_PER_ELEMENT` are actually getters. 
- ctx.indentationLvl += 2; - for ( - const key of [ - "BYTES_PER_ELEMENT", - "length", - "byteLength", - "byteOffset", - "buffer", - ] - ) { - const str = formatValue(ctx, value[key], recurseTimes, true); - Array.prototype.push.call(output, `[${key}]: ${str}`); - } - ctx.indentationLvl -= 2; - } - return output; -} - -function getIteratorBraces(type, tag) { - if (tag !== `${type} Iterator`) { - if (tag !== "") { - tag += "] ["; - } - tag += `${type} Iterator`; - } - return [`[${tag}] {`, "}"]; -} - -function formatIterator(braces, ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const { 0: entries, 1: isKeyValue } = previewEntries(value, true); - const { 0: entries, 1: isKeyValue } = value; - if (isKeyValue) { - // Mark entry iterators as such. - braces[0] = braces[0].replace(/ Iterator] {$/, " Entries] {"); - return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); - } - - return formatSetIterInner(ctx, recurseTimes, entries, kIterator); -} - -function getFunctionBase(value, constructor, tag) { - const stringified = Function.prototype.toString.call(value); - if (stringified.slice(0, 5) === "class" && stringified.endsWith("}")) { - const slice = stringified.slice(5, -1); - const bracketIndex = slice.indexOf("{"); - if ( - bracketIndex !== -1 && - (!slice.slice(0, bracketIndex).includes("(") || - // Slow path to guarantee that it's indeed a class. 
- classRegExp.test(slice.replace(stripCommentsRegExp))) - ) { - return getClassBase(value, constructor, tag); - } - } - let type = "Function"; - if (types.isGeneratorFunction(value)) { - type = `Generator${type}`; - } - if (types.isAsyncFunction(value)) { - type = `Async${type}`; - } - let base = `[${type}`; - if (constructor === null) { - base += " (null prototype)"; - } - if (value.name === "") { - base += " (anonymous)"; - } else { - base += `: ${value.name}`; - } - base += "]"; - if (constructor !== type && constructor !== null) { - base += ` ${constructor}`; - } - if (tag !== "" && constructor !== tag) { - base += ` [${tag}]`; - } - return base; -} - -function formatError( - err, - constructor, - tag, - ctx, - keys, -) { - const name = err.name != null ? String(err.name) : "Error"; - let len = name.length; - let stack = err.stack ? String(err.stack) : err.toString(); - - // Do not "duplicate" error properties that are already included in the output - // otherwise. - if (!ctx.showHidden && keys.length !== 0) { - for (const name of ["name", "message", "stack"]) { - const index = keys.indexOf(name); - // Only hide the property in case it's part of the original stack - if (index !== -1 && stack.includes(err[name])) { - keys.splice(index, 1); - } - } - } - - // A stack trace may contain arbitrary data. Only manipulate the output - // for "regular errors" (errors that "look normal") for now. 
- if ( - constructor === null || - (name.endsWith("Error") && - stack.startsWith(name) && - (stack.length === len || stack[len] === ":" || stack[len] === "\n")) - ) { - let fallback = "Error"; - if (constructor === null) { - const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || - stack.match(/^([a-z_A-Z0-9-]*Error)$/); - fallback = (start && start[1]) || ""; - len = fallback.length; - fallback = fallback || "Error"; - } - const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); - if (name !== prefix) { - if (prefix.includes(name)) { - if (len === 0) { - stack = `${prefix}: ${stack}`; - } else { - stack = `${prefix}${stack.slice(len)}`; - } - } else { - stack = `${prefix} [${name}]${stack.slice(len)}`; - } - } - } - // Ignore the error message if it's contained in the stack. - let pos = (err.message && stack.indexOf(err.message)) || -1; - if (pos !== -1) { - pos += err.message.length; - } - // Wrap the error in brackets in case it has no stack trace. - const stackStart = stack.indexOf("\n at", pos); - if (stackStart === -1) { - stack = `[${stack}]`; - } else if (ctx.colors) { - // Highlight userland code and node modules. - let newStack = stack.slice(0, stackStart); - const lines = stack.slice(stackStart + 1).split("\n"); - for (const line of lines) { - // const core = line.match(coreModuleRegExp); - // TODO(wafuwafu13): Implement - // if (core !== null && NativeModule.exists(core[1])) { - // newStack += `\n${ctx.stylize(line, 'undefined')}`; - // } else { - // This adds underscores to all node_modules to quickly identify them. - let nodeModule; - newStack += "\n"; - let pos = 0; - // deno-lint-ignore no-cond-assign - while (nodeModule = nodeModulesRegExp.exec(line)) { - // '/node_modules/'.length === 14 - newStack += line.slice(pos, nodeModule.index + 14); - newStack += ctx.stylize(nodeModule[1], "module"); - pos = nodeModule.index + nodeModule[0].length; - } - newStack += pos === 0 ? 
line : line.slice(pos); - // } - } - stack = newStack; - } - // The message and the stack have to be indented as well! - if (ctx.indentationLvl !== 0) { - const indentation = " ".repeat(ctx.indentationLvl); - stack = stack.replace(/\n/g, `\n${indentation}`); - } - return stack; -} - -let hexSlice; - -function formatArrayBuffer(ctx, value) { - let buffer; - try { - buffer = new Uint8Array(value); - } catch { - return [ctx.stylize("(detached)", "special")]; - } - // TODO(wafuwafu13): Implement - // if (hexSlice === undefined) - // hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); - let str = hexSlice(buffer, 0, Math.min(ctx.maxArrayLength, buffer.length)) - .replace(/(.{2})/g, "$1 ").trim(); - - const remaining = buffer.length - ctx.maxArrayLength; - if (remaining > 0) { - str += ` ... ${remaining} more byte${remaining > 1 ? "s" : ""}`; - } - return [`${ctx.stylize("[Uint8Contents]", "special")}: <${str}>`]; -} - -function formatNumber(fn, value) { - // Format -0 as '-0'. Checking `value === -0` won't distinguish 0 from -0. - return fn(Object.is(value, -0) ? "-0" : `${value}`, "number"); -} - -function formatPromise(ctx, value, recurseTimes) { - let output; - // TODO(wafuwafu13): Implement - // const { 0: state, 1: result } = getPromiseDetails(value); - const { 0: state, 1: result } = value; - if (state === kPending) { - output = [ctx.stylize("", "special")]; - } else { - ctx.indentationLvl += 2; - const str = formatValue(ctx, result, recurseTimes); - ctx.indentationLvl -= 2; - output = [ - state === kRejected - ? 
`${ctx.stylize("", "special")} ${str}` - : str, - ]; - } - return output; -} - -function formatWeakCollection(ctx) { - return [ctx.stylize("", "special")]; -} - -function formatWeakSet(ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const entries = previewEntries(value); - const entries = value; - return formatSetIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatWeakMap(ctx, value, recurseTimes) { - // TODO(wafuwafu13): Implement - // const entries = previewEntries(value); - const entries = value; - return formatMapIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatProperty( - ctx, - value, - recurseTimes, - key, - type, - desc, - original = value, -) { - let name, str; - let extra = " "; - desc = desc || Object.getOwnPropertyDescriptor(value, key) || - { value: value[key], enumerable: true }; - if (desc.value !== undefined) { - const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; - ctx.indentationLvl += diff; - str = formatValue(ctx, desc.value, recurseTimes); - if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { - extra = `\n${" ".repeat(ctx.indentationLvl)}`; - } - ctx.indentationLvl -= diff; - } else if (desc.get !== undefined) { - const label = desc.set !== undefined ? 
"Getter/Setter" : "Getter"; - const s = ctx.stylize; - const sp = "special"; - if ( - ctx.getters && (ctx.getters === true || - (ctx.getters === "get" && desc.set === undefined) || - (ctx.getters === "set" && desc.set !== undefined)) - ) { - try { - const tmp = desc.get.call(original); - ctx.indentationLvl += 2; - if (tmp === null) { - str = `${s(`[${label}:`, sp)} ${s("null", "null")}${s("]", sp)}`; - } else if (typeof tmp === "object") { - str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; - } else { - const primitive = formatPrimitive(s, tmp, ctx); - str = `${s(`[${label}:`, sp)} ${primitive}${s("]", sp)}`; - } - ctx.indentationLvl -= 2; - } catch (err) { - const message = ``; - str = `${s(`[${label}:`, sp)} ${message}${s("]", sp)}`; - } - } else { - str = ctx.stylize(`[${label}]`, sp); - } - } else if (desc.set !== undefined) { - str = ctx.stylize("[Setter]", "special"); - } else { - str = ctx.stylize("undefined", "undefined"); - } - if (type === kArrayType) { - return str; - } - if (typeof key === "symbol") { - const tmp = key.toString().replace(strEscapeSequencesReplacer, escapeFn); - - name = `[${ctx.stylize(tmp, "symbol")}]`; - } else if (key === "__proto__") { - name = "['__proto__']"; - } else if (desc.enumerable === false) { - const tmp = key.replace(strEscapeSequencesReplacer, escapeFn); - - name = `[${tmp}]`; - } else if (keyStrRegExp.test(key)) { - name = ctx.stylize(key, "name"); - } else { - name = ctx.stylize(strEscape(key), "string"); - } - return `${name}:${extra}${str}`; -} - -function handleMaxCallStackSize( - _ctx, - _err, - _constructorName, - _indentationLvl, -) { - // TODO(wafuwafu13): Implement - // if (types.isStackOverflowError(err)) { - // ctx.seen.pop(); - // ctx.indentationLvl = indentationLvl; - // return ctx.stylize( - // `[${constructorName}: Inspection interrupted ` + - // 'prematurely. 
Maximum call stack size exceeded.]', - // 'special' - // ); - // } - // /* c8 ignore next */ - // assert.fail(err.stack); -} - -// deno-lint-ignore no-control-regex -const colorRegExp = /\u001b\[\d\d?m/g; -function removeColors(str) { - return str.replace(colorRegExp, ""); -} - -function isBelowBreakLength(ctx, output, start, base) { - // Each entry is separated by at least a comma. Thus, we start with a total - // length of at least `output.length`. In addition, some cases have a - // whitespace in-between each other that is added to the total as well. - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. Check the performance overhead and make it an opt-in in case it's - // significant. - let totalLength = output.length + start; - if (totalLength + output.length > ctx.breakLength) { - return false; - } - for (let i = 0; i < output.length; i++) { - if (ctx.colors) { - totalLength += removeColors(output[i]).length; - } else { - totalLength += output[i].length; - } - if (totalLength > ctx.breakLength) { - return false; - } - } - // Do not line up properties on the same line if `base` contains line breaks. - return base === "" || !base.includes("\n"); -} - -function formatBigInt(fn, value) { - return fn(`${value}n`, "bigint"); -} - -function formatNamespaceObject( - keys, - ctx, - value, - recurseTimes, -) { - const output = new Array(keys.length); - for (let i = 0; i < keys.length; i++) { - try { - output[i] = formatProperty( - ctx, - value, - recurseTimes, - keys[i], - kObjectType, - ); - } catch (_err) { - // TODO(wafuwfu13): Implement - // assert(isNativeError(err) && err.name === 'ReferenceError'); - // Use the existing functionality. This makes sure the indentation and - // line breaks are always correct. Otherwise it is very difficult to keep - // this aligned, even though this is a hacky way of dealing with this. 
- const tmp = { [keys[i]]: "" }; - output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); - const pos = output[i].lastIndexOf(" "); - // We have to find the last whitespace and have to replace that value as - // it will be visualized as a regular string. - output[i] = output[i].slice(0, pos + 1) + - ctx.stylize("", "special"); - } - } - // Reset the keys to an empty array. This prevents duplicated inspection. - keys.length = 0; - return output; -} - -// The array is sparse and/or has extra keys -function formatSpecialArray( - ctx, - value, - recurseTimes, - maxLength, - output, - i, -) { - const keys = Object.keys(value); - let index = i; - for (; i < keys.length && output.length < maxLength; i++) { - const key = keys[i]; - const tmp = +key; - // Arrays can only have up to 2^32 - 1 entries - if (tmp > 2 ** 32 - 2) { - break; - } - if (`${index}` !== key) { - if (!numberRegExp.test(key)) { - break; - } - const emptyItems = tmp - index; - const ending = emptyItems > 1 ? "s" : ""; - const message = `<${emptyItems} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); - index = tmp; - if (output.length === maxLength) { - break; - } - } - output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); - index++; - } - const remaining = value.length - index; - if (output.length !== maxLength) { - if (remaining > 0) { - const ending = remaining > 1 ? "s" : ""; - const message = `<${remaining} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); - } - } else if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 
"s" : ""}`); - } - return output; -} - -function getBoxedBase( - value, - ctx, - keys, - constructor, - tag, -) { - let type; - if (types.isNumberObject(value)) { - type = "Number"; - } else if (types.isStringObject(value)) { - type = "String"; - // For boxed Strings, we have to remove the 0-n indexed entries, - // since they just noisy up the output and are redundant - // Make boxed primitive Strings look like such - keys.splice(0, value.length); - } else if (types.isBooleanObject(value)) { - type = "Boolean"; - } else if (types.isBigIntObject(value)) { - type = "BigInt"; - } else { - type = "Symbol"; - } - let base = `[${type}`; - if (type !== constructor) { - if (constructor === null) { - base += " (null prototype)"; - } else { - base += ` (${constructor})`; - } - } - - base += `: ${formatPrimitive(stylizeNoColor, value.valueOf(), ctx)}]`; - if (tag !== "" && tag !== constructor) { - base += ` [${tag}]`; - } - if (keys.length !== 0 || ctx.stylize === stylizeNoColor) { - return base; - } - return ctx.stylize(base, type.toLowerCase()); -} - -function getClassBase(value, constructor, tag) { - // deno-lint-ignore no-prototype-builtins - const hasName = value.hasOwnProperty("name"); - const name = (hasName && value.name) || "(anonymous)"; - let base = `class ${name}`; - if (constructor !== "Function" && constructor !== null) { - base += ` [${constructor}]`; - } - if (tag !== "" && constructor !== tag) { - base += ` [${tag}]`; - } - if (constructor !== null) { - const superName = Object.getPrototypeOf(value).name; - if (superName) { - base += ` extends ${superName}`; - } - } else { - base += " extends [null prototype]"; - } - return `[${base}]`; -} - -function reduceToSingleString( - ctx, - output, - base, - braces, - extrasType, - recurseTimes, - value, -) { - if (ctx.compact !== true) { - if (typeof ctx.compact === "number" && ctx.compact >= 1) { - // Memorize the original output length. 
In case the output is grouped, - // prevent lining up the entries on a single line. - const entries = output.length; - // Group array elements together if the array contains at least six - // separate entries. - if (extrasType === kArrayExtrasType && entries > 6) { - output = groupArrayElements(ctx, output, value); - } - // `ctx.currentDepth` is set to the most inner depth of the currently - // inspected object part while `recurseTimes` is the actual current depth - // that is inspected. - // - // Example: - // - // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } - // - // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max - // depth of 1. - // - // Consolidate all entries of the local most inner depth up to - // `ctx.compact`, as long as the properties are smaller than - // `ctx.breakLength`. - if ( - ctx.currentDepth - recurseTimes < ctx.compact && - entries === output.length - ) { - // Line up all entries on a single line in case the entries do not - // exceed `breakLength`. Add 10 as constant to start next to all other - // factors that may reduce `breakLength`. - const start = output.length + ctx.indentationLvl + - braces[0].length + base.length + 10; - if (isBelowBreakLength(ctx, output, start, base)) { - return `${base ? `${base} ` : ""}${braces[0]} ${join(output, ", ")}` + - ` ${braces[1]}`; - } - } - } - // Line up each entry on an individual line. - const indentation = `\n${" ".repeat(ctx.indentationLvl)}`; - return `${base ? `${base} ` : ""}${braces[0]}${indentation} ` + - `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; - } - // Line up all entries on a single line in case the entries do not exceed - // `breakLength`. - if (isBelowBreakLength(ctx, output, 0, base)) { - return `${braces[0]}${base ? 
` ${base}` : ""} ${join(output, ", ")} ` + - braces[1]; - } - const indentation = " ".repeat(ctx.indentationLvl); - // If the opening "brace" is too large, like in the case of "Set {", - // we need to force the first item to be on the next line or the - // items will not line up correctly. - const ln = base === "" && braces[0].length === 1 - ? " " - : `${base ? ` ${base}` : ""}\n${indentation} `; - // Line up each entry on an individual line. - return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; -} - -// The built-in Array#join is slower in v8 6.0 -function join(output, separator) { - let str = ""; - if (output.length !== 0) { - const lastIndex = output.length - 1; - for (let i = 0; i < lastIndex; i++) { - // It is faster not to use a template string here - str += output[i]; - str += separator; - } - str += output[lastIndex]; - } - return str; -} - -function groupArrayElements(ctx, output, value) { - let totalLength = 0; - let maxLength = 0; - let i = 0; - let outputLength = output.length; - if (ctx.maxArrayLength < output.length) { - // This makes sure the "... n more items" part is not taken into account. - outputLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(outputLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. We have to remove colors first, - // otherwise the length would not be calculated properly. - for (; i < outputLength; i++) { - const len = getStringWidth(output[i], ctx.colors); - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) { - maxLength = len; - } - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. 
- const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. - if ( - actualMax * 3 + ctx.indentationLvl < ctx.breakLength && - (totalLength / actualMax > 5 || maxLength <= 6) - ) { - const approxCharHeights = 2.5; - const averageBias = Math.sqrt(actualMax - totalLength / output.length); - const biasedMax = Math.max(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = Math.min( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - Math.round( - Math.sqrt( - approxCharHeights * biasedMax * outputLength, - ) / biasedMax, - ), - // Do not exceed the breakLength. - Math.floor((ctx.breakLength - ctx.indentationLvl) / actualMax), - // Limit array grouping for small `compact` modes as the user requested - // minimal grouping. - ctx.compact * 4, - // Limit the columns to a maximum of fifteen. - 15, - ); - // Return with the original output if no grouping should happen. 
- if (columns <= 1) { - return output; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < output.length; j += columns) { - if (dataLen[j] > lineMaxLength) { - lineMaxLength = dataLen[j]; - } - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = String.prototype.padStart; - if (value !== undefined) { - for (let i = 0; i < output.length; i++) { - if (typeof value[i] !== "number" && typeof value[i] !== "bigint") { - order = String.prototype.padEnd; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < outputLength; i += columns) { - // The last lines may contain less entries than columns. - const max = Math.min(i + columns, outputLength); - let str = ""; - let j = i; - for (; j < max - 1; j++) { - // Calculate extra color padding in case it's active. This has to be - // done line by line as some lines might contain more colors than - // others. - const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; - str += `${output[j]}, `.padStart(padding, " "); - } - if (order === String.prototype.padStart) { - const padding = maxLineLength[j - i] + - output[j].length - - dataLen[j] - - separatorSpace; - str += output[j].padStart(padding, " "); - } else { - str += output[j]; - } - Array.prototype.push.call(tmp, str); - } - if (ctx.maxArrayLength < output.length) { - Array.prototype.push.call(tmp, output[outputLength]); - } - output = tmp; - } - return output; -} - -function formatMapIterInner( - ctx, - recurseTimes, - entries, - state, -) { - const maxArrayLength = Math.max(ctx.maxArrayLength, 0); - // Entries exist as [key1, val1, key2, val2, ...] 
- const len = entries.length / 2; - const remaining = len - maxArrayLength; - const maxLength = Math.min(maxArrayLength, len); - let output = new Array(maxLength); - let i = 0; - ctx.indentationLvl += 2; - if (state === kWeak) { - for (; i < maxLength; i++) { - const pos = i * 2; - output[i] = `${formatValue(ctx, entries[pos], recurseTimes)} => ${ - formatValue(ctx, entries[pos + 1], recurseTimes) - }`; - } - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - if (!ctx.sorted) { - output = output.sort(); - } - } else { - for (; i < maxLength; i++) { - const pos = i * 2; - const res = [ - formatValue(ctx, entries[pos], recurseTimes), - formatValue(ctx, entries[pos + 1], recurseTimes), - ]; - output[i] = reduceToSingleString( - ctx, - res, - "", - ["[", "]"], - kArrayExtrasType, - recurseTimes, - ); - } - } - ctx.indentationLvl -= 2; - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); - } - return output; -} - -function formatSetIterInner( - ctx, - recurseTimes, - entries, - state, -) { - const maxArrayLength = Math.max(ctx.maxArrayLength, 0); - const maxLength = Math.min(maxArrayLength, entries.length); - const output = new Array(maxLength); - ctx.indentationLvl += 2; - for (let i = 0; i < maxLength; i++) { - output[i] = formatValue(ctx, entries[i], recurseTimes); - } - ctx.indentationLvl -= 2; - if (state === kWeak && !ctx.sorted) { - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - output.sort(); - } - const remaining = entries.length - maxLength; - if (remaining > 0) { - Array.prototype.push.call( - output, - `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, - ); - } - return output; -} - // Regex used for ansi escape code splitting // Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js // License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index fa16cc1f40..b6dab121c4 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -46,6 +46,7 @@ import * as os from "ext:runtime/30_os.js"; import * as timers from "ext:deno_web/02_timers.js"; import * as colors from "ext:deno_console/01_colors.js"; import { + getDefaultInspectOptions, inspectArgs, quoteString, wrapConsole, @@ -218,7 +219,7 @@ function formatException(error) { return null; } else if (typeof error == "string") { return `Uncaught ${ - inspectArgs([quoteString(error)], { + inspectArgs([quoteString(error, getDefaultInspectOptions())], { colors: !colors.getNoColor(), }) }`; From bb1f5e4262940a966e6314f57a4267514911d262 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sun, 30 Apr 2023 10:50:24 +0200 Subject: [PATCH 084/320] perf(core): async op pseudo-codegen and performance work (#18887) Performance: ``` async_ops.js: 760k -> 1030k (!) 
async_ops_deferred.js: 730k -> 770k Deno.serve bench: 118k -> 124k WS test w/ third_party/prebuilt/mac/load_test 100 localhost 8000 0 0: unchanged Startup time: approx 0.5ms slower (13.7 -> 14.2ms) ``` --- cli/bench/async_ops.js | 4 +- cli/bench/async_ops_deferred.js | 4 +- cli/tests/unit/metrics_test.ts | 10 +- cli/tests/unit/opcall_test.ts | 33 +- cli/tsc/compiler.d.ts | 2 + core/01_core.js | 469 ++++++++++++++++-- core/bindings.js | 49 ++ core/bindings.rs | 210 ++++---- core/extensions.rs | 1 + core/lib.deno_core.d.ts | 8 +- core/ops_builtin.rs | 18 + core/runtime.rs | 30 -- ext/http/00_serve.js | 81 ++- ops/lib.rs | 24 +- ops/optimizer_tests/async_nop.out | 1 + ops/optimizer_tests/async_result.out | 1 + ops/optimizer_tests/callback_options.out | 1 + ops/optimizer_tests/cow_str.out | 1 + ops/optimizer_tests/f64_slice.out | 1 + ops/optimizer_tests/incompatible_1.out | 1 + ops/optimizer_tests/issue16934.out | 1 + ops/optimizer_tests/issue16934_fast.out | 1 + .../op_blob_revoke_object_url.out | 1 + ops/optimizer_tests/op_ffi_ptr_value.out | 1 + ops/optimizer_tests/op_print.out | 1 + ops/optimizer_tests/op_state.out | 1 + ops/optimizer_tests/op_state_basic1.out | 1 + ops/optimizer_tests/op_state_generics.out | 1 + ops/optimizer_tests/op_state_result.out | 1 + ops/optimizer_tests/op_state_warning.out | 1 + .../op_state_with_transforms.out | 1 + ops/optimizer_tests/opstate_with_arity.out | 1 + ops/optimizer_tests/option_arg.out | 1 + ops/optimizer_tests/owned_string.out | 1 + .../param_mut_binding_warning.out | 1 + ops/optimizer_tests/raw_ptr.out | 1 + ops/optimizer_tests/serde_v8_value.out | 1 + ops/optimizer_tests/strings.out | 1 + ops/optimizer_tests/strings_result.out | 1 + ops/optimizer_tests/u64_result.out | 1 + ops/optimizer_tests/uint8array.out | 1 + ops/optimizer_tests/unit_result.out | 1 + ops/optimizer_tests/unit_result2.out | 1 + ops/optimizer_tests/unit_ret.out | 1 + ops/optimizer_tests/wasm_op.out | 1 + 45 files changed, 737 insertions(+), 237 
deletions(-) create mode 100644 core/bindings.js diff --git a/cli/bench/async_ops.js b/cli/bench/async_ops.js index fc04942be0..f6c1465d2b 100644 --- a/cli/bench/async_ops.js +++ b/cli/bench/async_ops.js @@ -17,4 +17,6 @@ async function bench(fun) { } const core = Deno[Deno.internal].core; -bench(() => core.opAsync("op_void_async")); +const ops = core.ops; +const opVoidAsync = ops.op_void_async; +bench(() => opVoidAsync()); diff --git a/cli/bench/async_ops_deferred.js b/cli/bench/async_ops_deferred.js index 7a816cf954..2751ad2261 100644 --- a/cli/bench/async_ops_deferred.js +++ b/cli/bench/async_ops_deferred.js @@ -17,4 +17,6 @@ async function bench(fun) { } const core = Deno[Deno.internal].core; -bench(() => core.opAsync("op_void_async_deferred")); +const ops = core.ops; +const opVoidAsyncDeferred = ops.op_void_async_deferred; +bench(() => opVoidAsyncDeferred()); diff --git a/cli/tests/unit/metrics_test.ts b/cli/tests/unit/metrics_test.ts index df2f1b2be5..5fdfebc85b 100644 --- a/cli/tests/unit/metrics_test.ts +++ b/cli/tests/unit/metrics_test.ts @@ -80,12 +80,14 @@ Deno.test(function metricsForOpCrates() { // Test that op_names == Objects.keys(Deno[Deno.internal].core.ops) // since building the per-op metrics depends on op_names being complete Deno.test(function opNamesMatch() { + // @ts-ignore: Deno[Deno.internal].core allowed + const ops = Object.keys(Deno[Deno.internal].core.ops); + // @ts-ignore: Deno[Deno.internal].core allowed + ops.concat(Object.keys(Deno[Deno.internal].core.asyncOps)); + assertEquals( // @ts-ignore: Deno[Deno.internal].core allowed Deno[Deno.internal].core.opNames().sort(), - // @ts-ignore: Deno[Deno.internal].core allowed - Object.keys(Deno[Deno.internal].core.ops).sort().filter((name) => - name !== "asyncOpsInfo" - ), + ops.sort().filter((name) => name !== "asyncOpsInfo"), ); }); diff --git a/cli/tests/unit/opcall_test.ts b/cli/tests/unit/opcall_test.ts index 8985c97801..3b37f8c097 100644 --- a/cli/tests/unit/opcall_test.ts +++ 
b/cli/tests/unit/opcall_test.ts @@ -1,20 +1,18 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { assertEquals } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; import { assert, assertStringIncludes, unreachable } from "./test_util.ts"; Deno.test(async function sendAsyncStackTrace() { - const buf = new Uint8Array(10); - const rid = 10; try { - await Deno.read(rid, buf); + await core.ops.op_error_async(); unreachable(); } catch (error) { assert(error instanceof Error); const s = error.stack?.toString(); assert(s); - console.log(s); assertStringIncludes(s, "opcall_test.ts"); - assertStringIncludes(s, "read"); + assertStringIncludes(s, "sendAsyncStackTrace"); assert( !s.includes("ext:core"), "opcall stack traces should NOT include ext:core internals such as unwrapOpResult", @@ -22,6 +20,31 @@ Deno.test(async function sendAsyncStackTrace() { } }); +Deno.test(async function sendAsyncStackTraceDeferred() { + try { + await core.ops.op_error_async_deferred(); + unreachable(); + } catch (error) { + assert(error instanceof Error); + const s = error.stack?.toString(); + assert(s); + assertStringIncludes(s, "opcall_test.ts"); + assertStringIncludes(s, "sendAsyncStackTraceDeferred"); + assert( + !s.includes("ext:core"), + "opcall stack traces should NOT include ext:core internals such as unwrapOpResult", + ); + } +}); + +Deno.test(function syncAdd() { + assertEquals(30, core.ops.op_add(10, 20)); +}); + +Deno.test(async function asyncAdd() { + assertEquals(30, await core.ops.op_add_async(10, 20)); +}); + // @ts-ignore This is not publicly typed namespace, but it's there for sure. 
const core = Deno[Deno.internal].core; diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts index b59f6dca81..66c0946972 100644 --- a/cli/tsc/compiler.d.ts +++ b/cli/tsc/compiler.d.ts @@ -46,6 +46,8 @@ declare global { encode(value: string): Uint8Array; // deno-lint-ignore no-explicit-any ops: Record any>; + // deno-lint-ignore no-explicit-any + asyncOps: Record any>; print(msg: string, stderr: boolean): void; registerErrorClass( name: string, diff --git a/core/01_core.js b/core/01_core.js index a8bdeb2a86..3972dec333 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -16,11 +16,15 @@ ObjectAssign, ObjectFreeze, ObjectFromEntries, + ObjectKeys, Promise, + PromiseReject, + PromiseResolve, PromisePrototypeThen, RangeError, ReferenceError, ReflectHas, + ReflectApply, SafeArrayIterator, SafeMap, SafePromisePrototypeFinally, @@ -32,7 +36,7 @@ TypeError, URIError, } = window.__bootstrap.primordials; - const { ops } = window.Deno.core; + const { ops, asyncOps } = window.Deno.core; const build = { target: "unknown", @@ -85,6 +89,17 @@ return opCallTracingEnabled; } + function movePromise(promiseId) { + const idx = promiseId % RING_SIZE; + // Move old promise from ring to map + const oldPromise = promiseRing[idx]; + if (oldPromise !== NO_PROMISE) { + const oldPromiseId = promiseId - RING_SIZE; + MapPrototypeSet(promiseMap, oldPromiseId, oldPromise); + } + return promiseRing[idx] = NO_PROMISE; + } + function setPromise(promiseId) { const idx = promiseId % RING_SIZE; // Move old promise from ring to map @@ -208,7 +223,29 @@ return error; } - function unwrapOpResult(res) { + function unwrapOpError(hideFunction) { + return (res) => { + // .$err_class_name is a special key that should only exist on errors + const className = res?.$err_class_name; + if (!className) { + return res; + } + + const errorBuilder = errorMap[className]; + const err = errorBuilder ? 
errorBuilder(res.message) : new Error( + `Unregistered error class: "${className}"\n ${res.message}\n Classes of errors returned from ops should be registered via Deno.core.registerErrorClass().`, + ); + // Set .code if error was a known OS error, see error_codes.rs + if (res.code) { + err.code = res.code; + } + // Strip unwrapOpResult() and errorBuilder() calls from stack trace + ErrorCaptureStackTrace(err, hideFunction); + throw err; + }; + } + + function unwrapOpResultNewPromise(id, res, hideFunction) { // .$err_class_name is a special key that should only exist on errors if (res?.$err_class_name) { const className = res.$err_class_name; @@ -221,59 +258,359 @@ err.code = res.code; } // Strip unwrapOpResult() and errorBuilder() calls from stack trace - ErrorCaptureStackTrace(err, unwrapOpResult); - throw err; + ErrorCaptureStackTrace(err, hideFunction); + return PromiseReject(err); } - return res; + const promise = PromiseResolve(res); + promise[promiseIdSymbol] = id; + return promise; + } + + /* +Basic codegen. + +TODO(mmastrac): automate this (handlebars?) 
+ +let s = ""; +const vars = "abcdefghijklm"; +for (let i = 0; i < 10; i++) { + let args = ""; + for (let j = 0; j < i; j++) { + args += `${vars[j]},`; + } + s += ` + case ${i}: + fn = function async_op_${i}(${args}) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, ${args}); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_${i}); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_${i}); + return PromiseReject(err); + } + let promise = PromisePrototypeThen(setPromise(id), unwrapOpError(eventLoopTick)); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + `; +} + */ + + // This function is called once per async stub + function asyncStub(opName, args) { + setUpAsyncStub(opName); + return ReflectApply(ops[opName], undefined, args); + } + + function setUpAsyncStub(opName) { + const originalOp = asyncOps[opName]; + let fn; + // The body of this switch statement can be generated using the script above. 
+ switch (originalOp.length - 1) { + case 0: + fn = function async_op_0() { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_0); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_0); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 1: + fn = function async_op_1(a) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_1); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_1); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 2: + fn = function async_op_2(a, b) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_2); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_2); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 3: + fn = function async_op_3(a, b, c) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_3); + 
} + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_3); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 4: + fn = function async_op_4(a, b, c, d) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_4); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_4); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 5: + fn = function async_op_5(a, b, c, d, e) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_5); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_5); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 6: + fn = function async_op_6(a, b, c, d, e, f) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_6); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_6); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise 
= handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 7: + fn = function async_op_7(a, b, c, d, e, f, g) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_7); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_7); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 8: + fn = function async_op_8(a, b, c, d, e, f, g, h) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g, h); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_8); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_8); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + case 9: + fn = function async_op_9(a, b, c, d, e, f, g, h, i) { + const id = nextPromiseId++; + try { + const maybeResult = originalOp(id, a, b, c, d, e, f, g, h, i); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, async_op_9); + } + } catch (err) { + movePromise(id); + ErrorCaptureStackTrace(err, async_op_9); + return PromiseReject(err); + } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); + promise = handleOpCallTracing(opName, id, promise); + promise[promiseIdSymbol] = id; + return promise; + }; + break; + + default: + throw new Error( + `Too many arguments 
for async op codegen (length of ${opName} was ${ + originalOp.length - 1 + })`, + ); + } + return (ops[opName] = fn); } function opAsync2(name, arg0, arg1) { const id = nextPromiseId++; - let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult); - let maybeResult; try { - maybeResult = ops[name](id, arg0, arg1); - } catch (err) { - // Cleanup the just-created promise - getPromise(id); - if (!ReflectHas(ops, name)) { - throw new TypeError(`${name} is not a registered op`); + const maybeResult = asyncOps[name](id, arg0, arg1); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, opAsync2); } - // Rethrow the error - throw err; + } catch (err) { + movePromise(id); + if (!ReflectHas(asyncOps, name)) { + return PromiseReject(new TypeError(`${name} is not a registered op`)); + } + ErrorCaptureStackTrace(err, opAsync2); + return PromiseReject(err); } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); promise = handleOpCallTracing(name, id, promise); promise[promiseIdSymbol] = id; - if (typeof maybeResult !== "undefined") { - const promise = getPromise(id); - promise.resolve(maybeResult); - } - return promise; } function opAsync(name, ...args) { const id = nextPromiseId++; - let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult); - let maybeResult; try { - maybeResult = ops[name](id, ...new SafeArrayIterator(args)); - } catch (err) { - // Cleanup the just-created promise - getPromise(id); - if (!ReflectHas(ops, name)) { - throw new TypeError(`${name} is not a registered op`); + const maybeResult = asyncOps[name](id, ...new SafeArrayIterator(args)); + if (maybeResult !== undefined) { + movePromise(id); + return unwrapOpResultNewPromise(id, maybeResult, opAsync); } - // Rethrow the error - throw err; + } catch (err) { + movePromise(id); + if (!ReflectHas(asyncOps, name)) { + return PromiseReject(new TypeError(`${name} is not a registered op`)); + } + 
ErrorCaptureStackTrace(err, opAsync); + return PromiseReject(err); } + let promise = PromisePrototypeThen( + setPromise(id), + unwrapOpError(eventLoopTick), + ); promise = handleOpCallTracing(name, id, promise); promise[promiseIdSymbol] = id; - if (typeof maybeResult !== "undefined") { - const promise = getPromise(id); - promise.resolve(maybeResult); - } - return promise; } @@ -439,8 +776,52 @@ ); } + // Eagerly initialize ops for snapshot purposes + for (const opName of new SafeArrayIterator(ObjectKeys(asyncOps))) { + setUpAsyncStub(opName); + } + + function generateAsyncOpHandler(/* opNames... */) { + const fastOps = {}; + for (const opName of new SafeArrayIterator(arguments)) { + if (ops[opName] === undefined) { + throw new Error(`Unknown or disabled op '${opName}'`); + } + if (asyncOps[opName] !== undefined) { + fastOps[opName] = setUpAsyncStub(opName); + } else { + fastOps[opName] = ops[opName]; + } + } + return fastOps; + } + + const { + op_close: close, + op_try_close: tryClose, + op_read: read, + op_read_all: readAll, + op_write: write, + op_write_all: writeAll, + op_read_sync: readSync, + op_write_sync: writeSync, + op_shutdown: shutdown, + } = generateAsyncOpHandler( + "op_close", + "op_try_close", + "op_read", + "op_read_all", + "op_write", + "op_write_all", + "op_read_sync", + "op_write_sync", + "op_shutdown", + ); + // Extra Deno.core.* exports const core = ObjectAssign(globalThis.Deno.core, { + asyncStub, + generateAsyncOpHandler, opAsync, opAsync2, resources, @@ -460,15 +841,15 @@ unrefOp, setReportExceptionCallback, setPromiseHooks, - close: (rid) => ops.op_close(rid), - tryClose: (rid) => ops.op_try_close(rid), - read: opAsync.bind(null, "op_read"), - readAll: opAsync.bind(null, "op_read_all"), - write: opAsync.bind(null, "op_write"), - writeAll: opAsync.bind(null, "op_write_all"), - readSync: (rid, buffer) => ops.op_read_sync(rid, buffer), - writeSync: (rid, buffer) => ops.op_write_sync(rid, buffer), - shutdown: opAsync.bind(null, "op_shutdown"), 
+ close, + tryClose, + read, + readAll, + write, + writeAll, + readSync, + writeSync, + shutdown, print: (msg, isErr) => ops.op_print(msg, isErr), setMacrotaskCallback, setNextTickCallback, diff --git a/core/bindings.js b/core/bindings.js new file mode 100644 index 0000000000..c7d7af30ce --- /dev/null +++ b/core/bindings.js @@ -0,0 +1,49 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +if (!globalThis.Deno) { + globalThis.Deno = { + core: { + ops: {}, + asyncOps: {}, + }, + }; +} + +Deno.__op__console = function (callConsole, console) { + Deno.core.callConsole = callConsole; + Deno.core.console = console; +}; + +Deno.__op__registerOp = function (isAsync, op, opName) { + const core = Deno.core; + if (isAsync) { + if (core.ops[opName] !== undefined) { + return; + } + core.asyncOps[opName] = op; + core.ops[opName] = function (...args) { + if (this !== core.ops) { + // deno-lint-ignore prefer-primordials + throw new Error( + "An async stub cannot be separated from Deno.core.ops. Use ???", + ); + } + return core.asyncStub(opName, args); + }; + } else { + core.ops[opName] = op; + } +}; + +Deno.__op__unregisterOp = function (isAsync, opName) { + if (isAsync) { + delete Deno.core.asyncOps[opName]; + } + delete Deno.core.ops[opName]; +}; + +Deno.__op__cleanup = function () { + delete Deno.__op__console; + delete Deno.__op__registerOp; + delete Deno.__op__unregisterOp; + delete Deno.__op__cleanup; +}; diff --git a/core/bindings.rs b/core/bindings.rs index 95e78b6cd3..2d9c914619 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -1,9 +1,9 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use log::debug; +use std::fmt::Write; use std::option::Option; use std::os::raw::c_void; - -use log::debug; use v8::MapFnTo; use crate::error::is_instance_of_error; @@ -98,6 +98,23 @@ pub fn module_origin<'a>( ) } +fn get<'s, T>( + scope: &mut v8::HandleScope<'s>, + from: v8::Local, + key: &'static [u8], + path: &'static str, +) -> T +where + v8::Local<'s, v8::Value>: TryInto, +{ + let key = v8::String::new_external_onebyte_static(scope, key).unwrap(); + from + .get(scope, key.into()) + .unwrap_or_else(|| panic!("{path} exists")) + .try_into() + .unwrap_or_else(|_| panic!("unable to convert")) +} + pub(crate) fn initialize_context<'s>( scope: &mut v8::HandleScope<'s, ()>, op_ctxs: &[OpCtx], @@ -108,135 +125,92 @@ pub(crate) fn initialize_context<'s>( let scope = &mut v8::ContextScope::new(scope, context); - let deno_str = - v8::String::new_external_onebyte_static(scope, b"Deno").unwrap(); - let core_str = - v8::String::new_external_onebyte_static(scope, b"core").unwrap(); - let ops_str = v8::String::new_external_onebyte_static(scope, b"ops").unwrap(); + let mut codegen = String::with_capacity(op_ctxs.len() * 200); + codegen.push_str(include_str!("bindings.js")); + _ = writeln!( + codegen, + "Deno.__op__ = function(opFns, callConsole, console) {{" + ); + if !snapshot_options.loaded() { + _ = writeln!(codegen, "Deno.__op__console(callConsole, console);"); + } + for op_ctx in op_ctxs { + if op_ctx.decl.enabled { + // If we're loading from a snapshot, we can skip registration for most ops + if matches!(snapshot_options, SnapshotOptions::Load) + && !op_ctx.decl.force_registration + { + continue; + } + _ = writeln!( + codegen, + "Deno.__op__registerOp({}, opFns[{}], \"{}\");", + op_ctx.decl.is_async, op_ctx.id, op_ctx.decl.name + ); + } else { + _ = writeln!( + codegen, + "Deno.__op__unregisterOp({}, \"{}\");", + op_ctx.decl.is_async, op_ctx.decl.name + ); + } + } + codegen.push_str("Deno.__op__cleanup();"); + _ = writeln!(codegen, "}}"); - let ops_obj = if 
snapshot_options.loaded() { - // Snapshot already registered `Deno.core.ops` but - // extensions may provide ops that aren't part of the snapshot. - // Grab the Deno.core.ops object & init it - let deno_obj: v8::Local = global - .get(scope, deno_str.into()) - .unwrap() - .try_into() - .unwrap(); - let core_obj: v8::Local = deno_obj - .get(scope, core_str.into()) - .unwrap() - .try_into() - .unwrap(); - let ops_obj: v8::Local = core_obj - .get(scope, ops_str.into()) - .expect("Deno.core.ops to exist") - .try_into() - .unwrap(); - ops_obj + let script = v8::String::new_from_one_byte( + scope, + codegen.as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap(); + let script = v8::Script::compile(scope, script, None).unwrap(); + script.run(scope); + + let deno = get(scope, global, b"Deno", "Deno"); + let op_fn: v8::Local = + get(scope, deno, b"__op__", "Deno.__op__"); + let recv = v8::undefined(scope); + let op_fns = v8::Array::new(scope, op_ctxs.len() as i32); + for op_ctx in op_ctxs { + let op_fn = op_ctx_function(scope, op_ctx); + op_fns.set_index(scope, op_ctx.id as u32, op_fn.into()); + } + if snapshot_options.loaded() { + op_fn.call(scope, recv.into(), &[op_fns.into()]); } else { - // globalThis.Deno = { core: { } }; - let deno_obj = v8::Object::new(scope); - global.set(scope, deno_str.into(), deno_obj.into()); - - let core_obj = v8::Object::new(scope); - deno_obj.set(scope, core_str.into(), core_obj.into()); - // Bind functions to Deno.core.* - set_func(scope, core_obj, "callConsole", call_console); + let call_console_fn = v8::Function::new(scope, call_console).unwrap(); // Bind v8 console object to Deno.core.console let extra_binding_obj = context.get_extras_binding_object(scope); - let console_str = - v8::String::new_external_onebyte_static(scope, b"console").unwrap(); - let console_obj = extra_binding_obj.get(scope, console_str.into()).unwrap(); - core_obj.set(scope, console_str.into(), console_obj); + let console_obj: v8::Local = get( + scope, + 
extra_binding_obj, + b"console", + "ExtrasBindingObject.console", + ); - // Bind functions to Deno.core.ops.* - let ops_obj = v8::Object::new(scope); - core_obj.set(scope, ops_str.into(), ops_obj.into()); - ops_obj - }; - - if matches!(snapshot_options, SnapshotOptions::Load) { - // Only register ops that have `force_registration` flag set to true, - // the remaining ones should already be in the snapshot. Ignore ops that - // are disabled. - for op_ctx in op_ctxs { - if op_ctx.decl.enabled { - if op_ctx.decl.force_registration { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } - } else { - delete_op_from_deno_core_ops(scope, ops_obj, op_ctx) - } - } - } else if matches!(snapshot_options, SnapshotOptions::CreateFromExisting) { - // Register all enabled ops, probing for which ones are already registered. - for op_ctx in op_ctxs { - let key = v8::String::new_external_onebyte_static( - scope, - op_ctx.decl.name.as_bytes(), - ) - .unwrap(); - - if op_ctx.decl.enabled { - if ops_obj.get(scope, key.into()).is_some() { - continue; - } - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } else { - delete_op_from_deno_core_ops(scope, ops_obj, op_ctx) - } - } - } else { - // In other cases register all ops enabled unconditionally. 
- for op_ctx in op_ctxs { - if op_ctx.decl.enabled { - add_op_to_deno_core_ops(scope, ops_obj, op_ctx); - } - } + op_fn.call( + scope, + recv.into(), + &[op_fns.into(), call_console_fn.into(), console_obj.into()], + ); } context } -fn set_func( - scope: &mut v8::HandleScope<'_>, - obj: v8::Local, - name: &'static str, - callback: impl v8::MapFnTo, -) { - let key = - v8::String::new_external_onebyte_static(scope, name.as_bytes()).unwrap(); - let val = v8::Function::new(scope, callback).unwrap(); - val.set_name(key); - obj.set(scope, key.into(), val.into()); -} - -fn delete_op_from_deno_core_ops( - scope: &mut v8::HandleScope<'_>, - obj: v8::Local, +fn op_ctx_function<'s>( + scope: &mut v8::HandleScope<'s>, op_ctx: &OpCtx, -) { - let key = - v8::String::new_external_onebyte_static(scope, op_ctx.decl.name.as_bytes()) - .unwrap(); - obj.delete(scope, key.into()); -} - -fn add_op_to_deno_core_ops( - scope: &mut v8::HandleScope<'_>, - obj: v8::Local, - op_ctx: &OpCtx, -) { +) -> v8::Local<'s, v8::Function> { let op_ctx_ptr = op_ctx as *const OpCtx as *const c_void; - let key = - v8::String::new_external_onebyte_static(scope, op_ctx.decl.name.as_bytes()) - .unwrap(); let external = v8::External::new(scope, op_ctx_ptr as *mut c_void); - let builder = v8::FunctionTemplate::builder_raw(op_ctx.decl.v8_fn_ptr) - .data(external.into()); + let builder: v8::FunctionBuilder = + v8::FunctionTemplate::builder_raw(op_ctx.decl.v8_fn_ptr) + .data(external.into()) + .length(op_ctx.decl.arg_count as i32); let templ = if let Some(fast_function) = &op_ctx.decl.fast_fn { builder.build_fast( @@ -249,9 +223,7 @@ fn add_op_to_deno_core_ops( } else { builder.build(scope) }; - let val = templ.get_function(scope).unwrap(); - val.set_name(key); - obj.set(scope, key.into(), val.into()); + templ.get_function(scope).unwrap() } pub extern "C" fn wasm_async_resolve_promise_callback( diff --git a/core/extensions.rs b/core/extensions.rs index a0f99c92b0..a8b52eb3b6 100644 --- a/core/extensions.rs +++ 
b/core/extensions.rs @@ -73,6 +73,7 @@ pub struct OpDecl { pub is_unstable: bool, pub is_v8: bool, pub force_registration: bool, + pub arg_count: u8, pub fast_fn: Option, } diff --git a/core/lib.deno_core.d.ts b/core/lib.deno_core.d.ts index 7f3ea2a191..fc78658294 100644 --- a/core/lib.deno_core.d.ts +++ b/core/lib.deno_core.d.ts @@ -23,10 +23,16 @@ declare namespace Deno { /** * List of all registered ops, in the form of a map that maps op - * name to internal numerical op id. + * name to function. */ const ops: Record any>; + /** + * List of all registered async ops, in the form of a map that maps op + * name to function. + */ + const asyncOps: Record any>; + /** * Retrieve a list of all open resources, in the form of a map that maps * resource id to the resource name. diff --git a/core/ops_builtin.rs b/core/ops_builtin.rs index 0c071a9186..70f478acd9 100644 --- a/core/ops_builtin.rs +++ b/core/ops_builtin.rs @@ -27,9 +27,12 @@ crate::extension!( op_wasm_streaming_feed, op_wasm_streaming_set_url, op_void_sync, + op_error_async, + op_error_async_deferred, op_void_async, op_void_async_deferred, op_add, + op_add_async, // TODO(@AaronO): track IO metrics for builtin streams op_read, op_read_all, @@ -96,12 +99,27 @@ fn op_add(a: i32, b: i32) -> i32 { a + b } +#[op] +pub async fn op_add_async(a: i32, b: i32) -> i32 { + a + b +} + #[op(fast)] pub fn op_void_sync() {} #[op] pub async fn op_void_async() {} +#[op] +pub async fn op_error_async() -> Result<(), Error> { + Err(Error::msg("error")) +} + +#[op(deferred)] +pub async fn op_error_async_deferred() -> Result<(), Error> { + Err(Error::msg("error")) +} + #[op(deferred)] pub async fn op_void_async_deferred() {} diff --git a/core/runtime.rs b/core/runtime.rs index d88ddccacb..e6c365e420 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -3737,21 +3737,6 @@ assertEquals(1, notify_return_value); }) } - #[test] - fn test_core_js_stack_frame() { - let mut runtime = JsRuntime::new(RuntimeOptions::default()); - // Call 
non-existent op so we get error from `core.js` - let error = runtime - .execute_script_static( - "core_js_stack_frame.js", - "Deno.core.opAsync('non_existent');", - ) - .unwrap_err(); - let error_string = error.to_string(); - // Test that the script specifier is a URL: `ext:`. - assert!(error_string.contains("ext:core/01_core.js")); - } - #[test] fn test_v8_platform() { let options = RuntimeOptions { @@ -4721,21 +4706,6 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .is_ok()); } - #[test] - fn test_non_existent_async_op_error() { - // Verify that "resizable ArrayBuffer" is disabled - let mut runtime = JsRuntime::new(Default::default()); - let err = runtime - .execute_script_static( - "test_rab.js", - r#"Deno.core.opAsync("this_op_doesnt_exist");"#, - ) - .unwrap_err(); - assert!(err - .to_string() - .contains("this_op_doesnt_exist is not a registered op")); - } - #[tokio::test] async fn cant_load_internal_module_when_snapshot_is_loaded_and_not_snapshotting( ) { diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 0b2c605388..33742e122c 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -1,4 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase const core = globalThis.Deno.core; const primordials = globalThis.__bootstrap.primordials; const internals = globalThis.__bootstrap.internals; @@ -46,6 +47,39 @@ const { Uint8Array, } = primordials; +const { + op_http_wait, + op_upgrade, + op_get_request_headers, + op_get_request_method_and_url, + op_read_request_body, + op_serve_http, + op_set_promise_complete, + op_set_response_body_bytes, + op_set_response_body_resource, + op_set_response_body_stream, + op_set_response_body_text, + op_set_response_header, + op_set_response_headers, + op_upgrade_raw, + op_ws_server_create, +} = Deno.core.generateAsyncOpHandler( + "op_http_wait", + "op_upgrade", + "op_get_request_headers", + "op_get_request_method_and_url", + "op_read_request_body", + "op_serve_http", + "op_set_promise_complete", + "op_set_response_body_bytes", + "op_set_response_body_resource", + "op_set_response_body_stream", + "op_set_response_body_text", + "op_set_response_header", + "op_set_response_headers", + "op_upgrade_raw", + "op_ws_server_create", +); const _upgraded = Symbol("_upgraded"); function internalServerError() { @@ -143,7 +177,7 @@ class InnerRequest { this.#upgraded = () => {}; - const upgradeRid = core.ops.op_upgrade_raw(slabId); + const upgradeRid = op_upgrade_raw(slabId); const conn = new TcpConn( upgradeRid, @@ -174,12 +208,11 @@ class InnerRequest { (async () => { try { // Returns the connection and extra bytes, which we can pass directly to op_ws_server_create - const upgrade = await core.opAsync2( - "op_upgrade", + const upgrade = await op_upgrade( slabId, response.headerList, ); - const wsRid = core.ops.op_ws_server_create(upgrade[0], upgrade[1]); + const wsRid = op_ws_server_create(upgrade[0], upgrade[1]); // We have to wait for the go-ahead signal await goAhead; @@ -214,7 +247,7 @@ class InnerRequest { } // TODO(mmastrac): This is quite slow as we're serializing a large number of values. 
We may want to consider // splitting this up into multiple ops. - this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_get_request_method_and_url(this.#slabId); } const path = this.#methodAndUri[2]; @@ -249,7 +282,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_get_request_method_and_url(this.#slabId); } return { transport: "tcp", @@ -263,7 +296,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - this.#methodAndUri = core.ops.op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_get_request_method_and_url(this.#slabId); } return this.#methodAndUri[0]; } @@ -281,7 +314,7 @@ class InnerRequest { this.#body = null; return null; } - this.#streamRid = core.ops.op_read_request_body(this.#slabId); + this.#streamRid = op_read_request_body(this.#slabId); this.#body = new InnerBody(readableStreamForRid(this.#streamRid, false)); return this.#body; } @@ -290,7 +323,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - return core.ops.op_get_request_headers(this.#slabId); + return op_get_request_headers(this.#slabId); } get slabId() { @@ -331,12 +364,12 @@ function fastSyncResponseOrStream(req, respBody) { const body = stream.body; if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, body)) { - core.ops.op_set_response_body_bytes(req, body); + op_set_response_body_bytes(req, body); return null; } if (typeof body === "string") { - core.ops.op_set_response_body_text(req, body); + op_set_response_body_text(req, body); return null; } @@ -346,7 +379,7 @@ function fastSyncResponseOrStream(req, respBody) { } const resourceBacking = getReadableStreamResourceBacking(stream); if (resourceBacking) { - core.ops.op_set_response_body_resource( + op_set_response_body_resource( 
req, resourceBacking.rid, resourceBacking.autoClose, @@ -382,9 +415,9 @@ async function asyncResponse(responseBodies, req, status, stream) { // and we race it. let timeoutPromise; timeout = setTimeout(() => { - responseRid = core.ops.op_set_response_body_stream(req); + responseRid = op_set_response_body_stream(req); SetPrototypeAdd(responseBodies, responseRid); - core.ops.op_set_promise_complete(req, status); + op_set_promise_complete(req, status); timeoutPromise = core.writeAll(responseRid, value1); }, 250); const { value: value2, done: done2 } = await reader.read(); @@ -409,13 +442,13 @@ async function asyncResponse(responseBodies, req, status, stream) { // Reader will be closed by finally block // No response stream closed = true; - core.ops.op_set_response_body_bytes(req, value1); + op_set_response_body_bytes(req, value1); return; } - responseRid = core.ops.op_set_response_body_stream(req); + responseRid = op_set_response_body_stream(req); SetPrototypeAdd(responseBodies, responseRid); - core.ops.op_set_promise_complete(req, status); + op_set_promise_complete(req, status); // Write our first packet await core.writeAll(responseRid, value1); } @@ -447,7 +480,7 @@ async function asyncResponse(responseBodies, req, status, stream) { core.tryClose(responseRid); SetPrototypeDelete(responseBodies, responseRid); } else { - core.ops.op_set_promise_complete(req, status); + op_set_promise_complete(req, status); } } } @@ -511,9 +544,9 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { const headers = inner.headerList; if (headers && headers.length > 0) { if (headers.length == 1) { - core.ops.op_set_response_header(req, headers[0][0], headers[0][1]); + op_set_response_header(req, headers[0][0], headers[0][1]); } else { - core.ops.op_set_response_headers(req, headers); + op_set_response_headers(req, headers); } } @@ -523,7 +556,7 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { // Handle the stream asynchronously 
await asyncResponse(responseBodies, req, status, stream); } else { - core.ops.op_set_promise_complete(req, status); + op_set_promise_complete(req, status); } innerRequest?.close(); @@ -591,13 +624,13 @@ async function serve(arg1, arg2) { listenOpts.alpnProtocols = ["h2", "http/1.1"]; const listener = Deno.listenTls(listenOpts); listenOpts.port = listener.addr.port; - context.initialize(core.ops.op_serve_http( + context.initialize(op_serve_http( listener.rid, )); } else { const listener = Deno.listen(listenOpts); listenOpts.port = listener.addr.port; - context.initialize(core.ops.op_serve_http( + context.initialize(op_serve_http( listener.rid, )); } @@ -624,7 +657,7 @@ async function serve(arg1, arg2) { const rid = context.serverRid; let req; try { - req = await core.opAsync2("op_http_wait", rid); + req = await op_http_wait(rid); } catch (error) { if (ObjectPrototypeIsPrototypeOf(BadResourcePrototype, error)) { break; diff --git a/ops/lib.rs b/ops/lib.rs index 5a192537fd..d4fa0bb824 100644 --- a/ops/lib.rs +++ b/ops/lib.rs @@ -144,6 +144,8 @@ impl Op { is_unstable: #is_unstable, is_v8: #is_v8, force_registration: false, + // TODO(mmastrac) + arg_count: 0, } } @@ -158,8 +160,8 @@ impl Op { let has_fallible_fast_call = active && optimizer.returns_result; - let v8_body = if is_async { - let deferred = attrs.deferred; + let (v8_body, arg_count) = if is_async { + let deferred: bool = attrs.deferred; codegen_v8_async( &core, &item, @@ -205,6 +207,7 @@ impl Op { is_unstable: #is_unstable, is_v8: #is_v8, force_registration: false, + arg_count: #arg_count as u8, } } @@ -241,7 +244,7 @@ fn codegen_v8_async( margs: Attributes, asyncness: bool, deferred: bool, -) -> TokenStream2 { +) -> (TokenStream2, usize) { let Attributes { is_v8, .. } = margs; let special_args = f .sig @@ -309,7 +312,7 @@ fn codegen_v8_async( } }; - quote! { + let token_stream = quote! 
{ use #core::futures::FutureExt; // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime let ctx = unsafe { @@ -336,7 +339,10 @@ fn codegen_v8_async( if let Some(response) = maybe_response { rv.set(response); } - } + }; + + // +1 arg for the promise ID + (token_stream, 1 + f.sig.inputs.len() - rust_i0) } fn scope_arg(arg: &FnArg) -> Option { @@ -373,7 +379,7 @@ fn codegen_v8_sync( f: &syn::ItemFn, margs: Attributes, has_fallible_fast_call: bool, -) -> TokenStream2 { +) -> (TokenStream2, usize) { let Attributes { is_v8, .. } = margs; let special_args = f .sig @@ -404,7 +410,7 @@ fn codegen_v8_sync( quote! {} }; - quote! { + let token_stream = quote! { // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime let ctx = unsafe { &*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value() @@ -421,7 +427,9 @@ fn codegen_v8_sync( op_state.tracker.track_sync(ctx.id); #ret - } + }; + + (token_stream, f.sig.inputs.len() - rust_i0) } /// (full declarations, idents, v8 argument count) diff --git a/ops/optimizer_tests/async_nop.out b/ops/optimizer_tests/async_nop.out index d267338258..3765e611a8 100644 --- a/ops/optimizer_tests/async_nop.out +++ b/ops/optimizer_tests/async_nop.out @@ -41,6 +41,7 @@ impl op_void_async { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/async_result.out b/ops/optimizer_tests/async_result.out index 4494bf22ae..ca6d13c2e8 100644 --- a/ops/optimizer_tests/async_result.out +++ b/ops/optimizer_tests/async_result.out @@ -41,6 +41,7 @@ impl op_async_result { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/callback_options.out b/ops/optimizer_tests/callback_options.out index e892e01189..656124a807 100644 --- a/ops/optimizer_tests/callback_options.out +++ 
b/ops/optimizer_tests/callback_options.out @@ -41,6 +41,7 @@ impl op_fallback { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/cow_str.out b/ops/optimizer_tests/cow_str.out index dc909da819..ebb2108a21 100644 --- a/ops/optimizer_tests/cow_str.out +++ b/ops/optimizer_tests/cow_str.out @@ -41,6 +41,7 @@ impl op_cow_str { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/f64_slice.out b/ops/optimizer_tests/f64_slice.out index 3e8ef07d85..811aee288f 100644 --- a/ops/optimizer_tests/f64_slice.out +++ b/ops/optimizer_tests/f64_slice.out @@ -41,6 +41,7 @@ impl op_f64_buf { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/incompatible_1.out b/ops/optimizer_tests/incompatible_1.out index 5104fb5e46..59eb600bc6 100644 --- a/ops/optimizer_tests/incompatible_1.out +++ b/ops/optimizer_tests/incompatible_1.out @@ -31,6 +31,7 @@ impl op_sync_serialize_object_with_numbers_as_keys { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/issue16934.out b/ops/optimizer_tests/issue16934.out index e92510038c..35bd383390 100644 --- a/ops/optimizer_tests/issue16934.out +++ b/ops/optimizer_tests/issue16934.out @@ -31,6 +31,7 @@ impl send_stdin { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/issue16934_fast.out b/ops/optimizer_tests/issue16934_fast.out index 2a16d1b626..1291f9cabf 100644 --- a/ops/optimizer_tests/issue16934_fast.out +++ b/ops/optimizer_tests/issue16934_fast.out @@ -31,6 +31,7 @@ impl send_stdin { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git 
a/ops/optimizer_tests/op_blob_revoke_object_url.out b/ops/optimizer_tests/op_blob_revoke_object_url.out index 4eda692240..1a10a2b0a8 100644 --- a/ops/optimizer_tests/op_blob_revoke_object_url.out +++ b/ops/optimizer_tests/op_blob_revoke_object_url.out @@ -31,6 +31,7 @@ impl op_blob_revoke_object_url { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_ffi_ptr_value.out b/ops/optimizer_tests/op_ffi_ptr_value.out index 3fee00cff8..f3da0dfce5 100644 --- a/ops/optimizer_tests/op_ffi_ptr_value.out +++ b/ops/optimizer_tests/op_ffi_ptr_value.out @@ -41,6 +41,7 @@ impl op_ffi_ptr_value { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_print.out b/ops/optimizer_tests/op_print.out index 7bf5457d78..e0fecd6b29 100644 --- a/ops/optimizer_tests/op_print.out +++ b/ops/optimizer_tests/op_print.out @@ -31,6 +31,7 @@ impl op_print { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state.out b/ops/optimizer_tests/op_state.out index cebb1e25c7..300dd6fc2f 100644 --- a/ops/optimizer_tests/op_state.out +++ b/ops/optimizer_tests/op_state.out @@ -41,6 +41,7 @@ impl op_set_exit_code { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_basic1.out b/ops/optimizer_tests/op_state_basic1.out index d8278daca6..2452e886c0 100644 --- a/ops/optimizer_tests/op_state_basic1.out +++ b/ops/optimizer_tests/op_state_basic1.out @@ -41,6 +41,7 @@ impl foo { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_generics.out b/ops/optimizer_tests/op_state_generics.out index 631a2142f7..3faaa4bf16 100644 --- a/ops/optimizer_tests/op_state_generics.out +++ 
b/ops/optimizer_tests/op_state_generics.out @@ -47,6 +47,7 @@ impl op_foo { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_result.out b/ops/optimizer_tests/op_state_result.out index d03ffd5a61..137eeeac04 100644 --- a/ops/optimizer_tests/op_state_result.out +++ b/ops/optimizer_tests/op_state_result.out @@ -41,6 +41,7 @@ impl foo { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_warning.out b/ops/optimizer_tests/op_state_warning.out index 5548dc134e..ce677f0fa8 100644 --- a/ops/optimizer_tests/op_state_warning.out +++ b/ops/optimizer_tests/op_state_warning.out @@ -41,6 +41,7 @@ impl op_listen { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/op_state_with_transforms.out b/ops/optimizer_tests/op_state_with_transforms.out index ad4e5335a8..4347f63e45 100644 --- a/ops/optimizer_tests/op_state_with_transforms.out +++ b/ops/optimizer_tests/op_state_with_transforms.out @@ -47,6 +47,7 @@ impl op_now { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/opstate_with_arity.out b/ops/optimizer_tests/opstate_with_arity.out index 037774c255..a1ae081270 100644 --- a/ops/optimizer_tests/opstate_with_arity.out +++ b/ops/optimizer_tests/opstate_with_arity.out @@ -41,6 +41,7 @@ impl op_add_4 { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 4usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/option_arg.out b/ops/optimizer_tests/option_arg.out index 39d47562b8..adfc8da19d 100644 --- a/ops/optimizer_tests/option_arg.out +++ b/ops/optimizer_tests/option_arg.out @@ -31,6 +31,7 @@ impl op_try_close { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } 
#[inline] diff --git a/ops/optimizer_tests/owned_string.out b/ops/optimizer_tests/owned_string.out index f8b195b2fb..d8c0842ac5 100644 --- a/ops/optimizer_tests/owned_string.out +++ b/ops/optimizer_tests/owned_string.out @@ -41,6 +41,7 @@ impl op_string_length { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/param_mut_binding_warning.out b/ops/optimizer_tests/param_mut_binding_warning.out index 98dc6b2b91..e99606b377 100644 --- a/ops/optimizer_tests/param_mut_binding_warning.out +++ b/ops/optimizer_tests/param_mut_binding_warning.out @@ -31,6 +31,7 @@ impl op_read_sync { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/raw_ptr.out b/ops/optimizer_tests/raw_ptr.out index 678ce50152..3eefb5e7f4 100644 --- a/ops/optimizer_tests/raw_ptr.out +++ b/ops/optimizer_tests/raw_ptr.out @@ -52,6 +52,7 @@ impl op_ffi_ptr_of { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/serde_v8_value.out b/ops/optimizer_tests/serde_v8_value.out index d0f8dacdfc..867d89e43c 100644 --- a/ops/optimizer_tests/serde_v8_value.out +++ b/ops/optimizer_tests/serde_v8_value.out @@ -41,6 +41,7 @@ impl op_is_proxy { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/strings.out b/ops/optimizer_tests/strings.out index 3238bfc427..523736d70e 100644 --- a/ops/optimizer_tests/strings.out +++ b/ops/optimizer_tests/strings.out @@ -41,6 +41,7 @@ impl op_string_length { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/strings_result.out b/ops/optimizer_tests/strings_result.out index 8b2e2acef2..aae8b356bc 100644 --- a/ops/optimizer_tests/strings_result.out +++ 
b/ops/optimizer_tests/strings_result.out @@ -31,6 +31,7 @@ impl op_string_length { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/u64_result.out b/ops/optimizer_tests/u64_result.out index 02d25686a8..a0d7465125 100644 --- a/ops/optimizer_tests/u64_result.out +++ b/ops/optimizer_tests/u64_result.out @@ -31,6 +31,7 @@ impl op_bench_now { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/uint8array.out b/ops/optimizer_tests/uint8array.out index 93fa40e1f2..124f2ac576 100644 --- a/ops/optimizer_tests/uint8array.out +++ b/ops/optimizer_tests/uint8array.out @@ -41,6 +41,7 @@ impl op_import_spki_x25519 { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_result.out b/ops/optimizer_tests/unit_result.out index 354a2e3b94..9a46ee0874 100644 --- a/ops/optimizer_tests/unit_result.out +++ b/ops/optimizer_tests/unit_result.out @@ -41,6 +41,7 @@ impl op_unit_result { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_result2.out b/ops/optimizer_tests/unit_result2.out index 721229121b..c2e6708a03 100644 --- a/ops/optimizer_tests/unit_result2.out +++ b/ops/optimizer_tests/unit_result2.out @@ -41,6 +41,7 @@ impl op_set_nodelay { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 2usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/unit_ret.out b/ops/optimizer_tests/unit_ret.out index 7d0f63dc88..538674068e 100644 --- a/ops/optimizer_tests/unit_ret.out +++ b/ops/optimizer_tests/unit_ret.out @@ -41,6 +41,7 @@ impl op_unit { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 0usize as u8, } } #[inline] diff --git a/ops/optimizer_tests/wasm_op.out b/ops/optimizer_tests/wasm_op.out index 
0196f45481..cc8e3b8472 100644 --- a/ops/optimizer_tests/wasm_op.out +++ b/ops/optimizer_tests/wasm_op.out @@ -41,6 +41,7 @@ impl op_wasm { is_unstable: false, is_v8: false, force_registration: false, + arg_count: 1usize as u8, } } #[inline] From 59825a95b4d3952c955b8b43e174189999e35d15 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sun, 30 Apr 2023 11:11:37 +0200 Subject: [PATCH 085/320] refactor: remove ext/console/01_colors.js (#18927) --- cli/js/40_testing.js | 2 +- ext/console/01_colors.js | 108 ------------------- ext/console/{02_console.js => 01_console.js} | 17 ++- ext/console/internal.d.ts | 2 +- ext/console/lib.rs | 2 +- ext/fetch/23_request.js | 2 +- ext/fetch/23_response.js | 2 +- ext/node/polyfills/internal/util/inspect.mjs | 2 +- ext/web/01_dom_exception.js | 2 +- ext/web/02_event.js | 2 +- ext/web/06_streams.js | 2 +- ext/web/09_file.js | 2 +- ext/web/15_performance.js | 2 +- runtime/fmt_errors.rs | 2 +- runtime/js/90_deno_ns.js | 2 +- runtime/js/98_global_scope.js | 2 +- runtime/js/99_main.js | 11 +- 17 files changed, 34 insertions(+), 130 deletions(-) delete mode 100644 ext/console/01_colors.js rename ext/console/{02_console.js => 01_console.js} (99%) diff --git a/cli/js/40_testing.js b/cli/js/40_testing.js index 555f5f1fe7..8afcb74ee2 100644 --- a/cli/js/40_testing.js +++ b/cli/js/40_testing.js @@ -3,7 +3,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import { setExitHandler } from "ext:runtime/30_os.js"; -import { Console } from "ext:deno_console/02_console.js"; +import { Console } from "ext:deno_console/01_console.js"; import { serializePermissions } from "ext:runtime/10_permissions.js"; import { assert } from "ext:deno_web/00_infra.js"; const primordials = globalThis.__bootstrap.primordials; diff --git a/ext/console/01_colors.js b/ext/console/01_colors.js deleted file mode 100644 index a598db9217..0000000000 --- a/ext/console/01_colors.js +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. - -/// - -const primordials = globalThis.__bootstrap.primordials; -const { - SafeRegExp, - StringPrototypeReplace, - ArrayPrototypeJoin, -} = primordials; - -let noColor = false; - -function setNoColor(value) { - noColor = value; -} - -function getNoColor() { - return noColor; -} - -function code(open, close) { - return { - open: `\x1b[${open}m`, - close: `\x1b[${close}m`, - regexp: new SafeRegExp(`\\x1b\\[${close}m`, "g"), - }; -} - -function run(str, code) { - return `${code.open}${ - StringPrototypeReplace(str, code.regexp, code.open) - }${code.close}`; -} - -function bold(str) { - return run(str, code(1, 22)); -} - -function italic(str) { - return run(str, code(3, 23)); -} - -function yellow(str) { - return run(str, code(33, 39)); -} - -function cyan(str) { - return run(str, code(36, 39)); -} - -function red(str) { - return run(str, code(31, 39)); -} - -function green(str) { - return run(str, code(32, 39)); -} - -function bgRed(str) { - return run(str, code(41, 49)); -} - -function white(str) { - return run(str, code(37, 39)); -} - -function gray(str) { - return run(str, code(90, 39)); -} - -function magenta(str) { - return run(str, code(35, 39)); -} - -// https://github.com/chalk/ansi-regex/blob/02fa893d619d3da85411acc8fd4e2eea0e95a9d9/index.js -const ANSI_PATTERN = new SafeRegExp( - ArrayPrototypeJoin([ - "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", - "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))", - ], "|"), - "g", -); - -function stripColor(string) { - return StringPrototypeReplace(string, ANSI_PATTERN, ""); -} - -function maybeColor(fn) { - return !noColor ? 
fn : (s) => s; -} - -export { - bgRed, - bold, - cyan, - getNoColor, - gray, - green, - italic, - magenta, - maybeColor, - red, - setNoColor, - stripColor, - white, - yellow, -}; diff --git a/ext/console/02_console.js b/ext/console/01_console.js similarity index 99% rename from ext/console/02_console.js rename to ext/console/01_console.js index 51e8278764..318cf9cb42 100644 --- a/ext/console/02_console.js +++ b/ext/console/01_console.js @@ -119,7 +119,16 @@ const { SafeMapIterator, ArrayBufferPrototype, } = primordials; -import * as colors_ from "ext:deno_console/01_colors.js"; + +let noColor = false; + +function setNoColor(value) { + noColor = value; +} + +function getNoColor() { + return noColor; +} // Don't use 'blue' not visible on cmd.exe const styles = { @@ -3031,7 +3040,7 @@ function inspectArgs(args, inspectOptions = {}) { if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - const noColor = colors_.getNoColor(); + const noColor = getNoColor(); const first = args[0]; let a = 0; let string = ""; @@ -3146,7 +3155,7 @@ const timerMap = new SafeMap(); const isConsoleInstance = Symbol("isConsoleInstance"); function getConsoleInspectOptions() { - const color = !colors_.getNoColor(); + const color = !getNoColor(); return { ...getDefaultInspectOptions(), colors: color, @@ -3597,9 +3606,11 @@ export { formatNumber, formatValue, getDefaultInspectOptions, + getNoColor, inspect, inspectArgs, quoteString, + setNoColor, styles, wrapConsole, }; diff --git a/ext/console/internal.d.ts b/ext/console/internal.d.ts index d344f3a777..1fbc893785 100644 --- a/ext/console/internal.d.ts +++ b/ext/console/internal.d.ts @@ -3,7 +3,7 @@ /// /// -declare module "ext:deno_console/02_console.js" { +declare module "ext:deno_console/01_console.js" { function createFilteredInspectProxy(params: { object: TObject; keys: (keyof TObject)[]; diff --git a/ext/console/lib.rs b/ext/console/lib.rs index 
a45b856cd9..a31470e080 100644 --- a/ext/console/lib.rs +++ b/ext/console/lib.rs @@ -1,7 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::path::PathBuf; -deno_core::extension!(deno_console, esm = ["01_colors.js", "02_console.js"],); +deno_core::extension!(deno_console, esm = ["01_console.js"],); pub fn get_declaration() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_console.d.ts") diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index ae3edffd47..22c1f83544 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -10,7 +10,7 @@ /// import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { byteUpperCase, HTTP_TOKEN_CODE_POINT_RE, diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index ffbfe49360..86799252b2 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -12,7 +12,7 @@ const core = globalThis.Deno.core; import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { byteLowerCase, HTTP_TAB_OR_SPACE, diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs index cdaa3db816..671ab2acf4 100644 --- a/ext/node/polyfills/internal/util/inspect.mjs +++ b/ext/node/polyfills/internal/util/inspect.mjs @@ -22,7 +22,7 @@ import { validateObject, validateString } from "ext:deno_node/internal/validators.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; -import { createStylizeWithColor, formatValue, formatNumber, formatBigInt, styles, colors } from "ext:deno_console/02_console.js"; +import { createStylizeWithColor, formatValue, formatNumber, formatBigInt, styles, colors } 
from "ext:deno_console/01_console.js"; diff --git a/ext/web/01_dom_exception.js b/ext/web/01_dom_exception.js index ae3dcfd2e2..c465a06fdc 100644 --- a/ext/web/01_dom_exception.js +++ b/ext/web/01_dom_exception.js @@ -21,7 +21,7 @@ const { SymbolFor, } = primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; const _name = Symbol("name"); const _message = Symbol("message"); diff --git a/ext/web/02_event.js b/ext/web/02_event.js index 34b3502a7c..e453474206 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -9,7 +9,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeFilter, diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index ac626a209d..c8a7b9c47e 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -78,7 +78,7 @@ const { WeakMapPrototypeHas, WeakMapPrototypeSet, } = primordials; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { assert, AssertionError } from "ext:deno_web/00_infra.js"; /** @template T */ diff --git a/ext/web/09_file.js b/ext/web/09_file.js index dccb206112..8f0072e05b 100644 --- a/ext/web/09_file.js +++ b/ext/web/09_file.js @@ -48,7 +48,7 @@ const { TypeError, Uint8Array, } = primordials; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; // TODO(lucacasonato): this needs to 
not be hardcoded and instead depend on // host os. diff --git a/ext/web/15_performance.js b/ext/web/15_performance.js index 85990c954d..d494a5328b 100644 --- a/ext/web/15_performance.js +++ b/ext/web/15_performance.js @@ -16,7 +16,7 @@ const { } = primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { structuredClone } from "ext:deno_web/02_structured_clone.js"; -import { createFilteredInspectProxy } from "ext:deno_console/02_console.js"; +import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { EventTarget } from "ext:deno_web/02_event.js"; import { opNow } from "ext:deno_web/02_timers.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs index 6852cbcd1e..5dcb96ec77 100644 --- a/runtime/fmt_errors.rs +++ b/runtime/fmt_errors.rs @@ -12,7 +12,7 @@ use std::fmt::Write as _; /// Compares all properties of JsError, except for JsError::cause. /// This function is used to detect that 2 JsError objects in a JsError::cause /// chain are identical, ie. there is a recursive cause. -/// 02_console.js, which also detects recursive causes, can use JS object +/// 01_console.js, which also detects recursive causes, can use JS object /// comparisons to compare errors. We don't have access to JS object identity in /// format_js_error(). 
fn errors_are_equal_without_cause(a: &JsError, b: &JsError) -> bool { diff --git a/runtime/js/90_deno_ns.js b/runtime/js/90_deno_ns.js index bb6ba3b08d..3507a98a3d 100644 --- a/runtime/js/90_deno_ns.js +++ b/runtime/js/90_deno_ns.js @@ -4,7 +4,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as timers from "ext:deno_web/02_timers.js"; import * as httpClient from "ext:deno_fetch/22_http_client.js"; -import * as console from "ext:deno_console/02_console.js"; +import * as console from "ext:deno_console/01_console.js"; import * as ffi from "ext:deno_ffi/00_ffi.js"; import * as net from "ext:deno_net/01_net.js"; import * as tls from "ext:deno_net/02_tls.js"; diff --git a/runtime/js/98_global_scope.js b/runtime/js/98_global_scope.js index 911db65cb8..1084f5c248 100644 --- a/runtime/js/98_global_scope.js +++ b/runtime/js/98_global_scope.js @@ -13,7 +13,7 @@ import * as event from "ext:deno_web/02_event.js"; import * as timers from "ext:deno_web/02_timers.js"; import * as base64 from "ext:deno_web/05_base64.js"; import * as encoding from "ext:deno_web/08_text_encoding.js"; -import * as console from "ext:deno_console/02_console.js"; +import * as console from "ext:deno_console/01_console.js"; import * as caches from "ext:deno_cache/01_cache.js"; import * as compression from "ext:deno_web/14_compression.js"; import * as worker from "ext:runtime/11_workers.js"; diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index b6dab121c4..0ed692f3c9 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -44,13 +44,14 @@ import * as location from "ext:deno_web/12_location.js"; import * as version from "ext:runtime/01_version.ts"; import * as os from "ext:runtime/30_os.js"; import * as timers from "ext:deno_web/02_timers.js"; -import * as colors from "ext:deno_console/01_colors.js"; import { getDefaultInspectOptions, + getNoColor, inspectArgs, quoteString, + setNoColor, wrapConsole, -} from "ext:deno_console/02_console.js"; +} from 
"ext:deno_console/01_console.js"; import * as performance from "ext:deno_web/15_performance.js"; import * as url from "ext:deno_url/00_url.js"; import * as fetch from "ext:deno_fetch/26_fetch.js"; @@ -220,11 +221,11 @@ function formatException(error) { } else if (typeof error == "string") { return `Uncaught ${ inspectArgs([quoteString(error, getDefaultInspectOptions())], { - colors: !colors.getNoColor(), + colors: !getNoColor(), }) }`; } else { - return `Uncaught ${inspectArgs([error], { colors: !colors.getNoColor() })}`; + return `Uncaught ${inspectArgs([error], { colors: !getNoColor() })}`; } } @@ -313,7 +314,7 @@ function runtimeStart( ); core.setBuildInfo(target); util.setLogDebug(debugFlag, source); - colors.setNoColor(noColor || !isTty); + setNoColor(noColor || !isTty); // deno-lint-ignore prefer-primordials Error.prepareStackTrace = core.prepareStackTrace; } From 74bb09aa38e37603caf81152b48652a0d0972ccd Mon Sep 17 00:00:00 2001 From: Kenta Moriuchi Date: Sun, 30 Apr 2023 19:24:34 +0900 Subject: [PATCH 086/320] fix(ext/url): throw `TypeError` for empty argument (#18896) Fixes #18893 --- cli/tests/unit/url_test.ts | 15 +++++++++++++++ ext/url/00_url.js | 2 ++ 2 files changed, 17 insertions(+) diff --git a/cli/tests/unit/url_test.ts b/cli/tests/unit/url_test.ts index 644b8dd39a..28cf9a0e2c 100644 --- a/cli/tests/unit/url_test.ts +++ b/cli/tests/unit/url_test.ts @@ -32,6 +32,21 @@ Deno.test(function urlParsing() { ); }); +Deno.test(function emptyUrl() { + assertThrows( + // @ts-ignore for test + () => new URL(), + TypeError, + "1 argument required, but only 0 present", + ); + assertThrows( + // @ts-ignore for test + () => URL.canParse(), + TypeError, + "1 argument required, but only 0 present", + ); +}); + Deno.test(function urlProtocolParsing() { assertEquals(new URL("Aa+-.1://foo").protocol, "aa+-.1:"); assertEquals(new URL("aA+-.1://foo").protocol, "aa+-.1:"); diff --git a/ext/url/00_url.js b/ext/url/00_url.js index d76366cfae..7f674e9aa2 100644 --- 
a/ext/url/00_url.js +++ b/ext/url/00_url.js @@ -371,6 +371,7 @@ class URL { */ constructor(url, base = undefined) { const prefix = "Failed to construct 'URL'"; + webidl.requiredArguments(arguments.length, 1, prefix); url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); if (base !== undefined) { base = webidl.converters.DOMString(base, { @@ -390,6 +391,7 @@ class URL { */ static canParse(url, base = undefined) { const prefix = "Failed to call 'URL.canParse'"; + webidl.requiredArguments(arguments.length, 1, prefix); url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); if (base !== undefined) { base = webidl.converters.DOMString(base, { From 314799bc4f1ee42d6568c1a6a3578fbd3f9596c7 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Sun, 30 Apr 2023 15:54:56 +0530 Subject: [PATCH 087/320] perf(ext/websocket): use internal dispatch for msg events (#18904) ``` Linux divy-2 5.19.0-1022-gcp #24~22.04.1-Ubuntu SMP Sun Apr 23 09:51:08 UTC 2023 x86_64 x86_64 x86_64 GNU/Linux 32GiB System memory Intel(R) Xeon(R) CPU @ 3.10GHz # main Msg/sec: 89398.250000 Msg/sec: 90079.750000 # this patch Msg/sec: 91919.750000 Msg/sec: 91762.250000 ``` --- ext/web/02_event.js | 1 + ext/websocket/01_websocket.js | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ext/web/02_event.js b/ext/web/02_event.js index e453474206..6a5db3464c 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -1527,6 +1527,7 @@ export { CloseEvent, CustomEvent, defineEventHandler, + dispatch, ErrorEvent, Event, EventTarget, diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 7b23df80f0..7d46fdf2a0 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -15,6 +15,7 @@ import { _skipInternalInit, CloseEvent, defineEventHandler, + dispatch, ErrorEvent, Event, EventTarget, @@ -451,7 +452,7 @@ class WebSocket extends EventTarget { data: value, origin: this[_url], }); - this.dispatchEvent(event); 
+ dispatch(this, event); break; } case 1: { @@ -470,7 +471,7 @@ class WebSocket extends EventTarget { origin: this[_url], [_skipInternalInit]: true, }); - this.dispatchEvent(event); + dispatch(this, event); break; } case 2: { From 96e214d9d0a18a77256f3fcce755dec03fdb49b5 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Sun, 30 Apr 2023 10:03:39 -0400 Subject: [PATCH 088/320] fix(fmt/json): support formatting number with exponent and no sign (#18894) Numbers with an exponent and no sign (ex. `1e1`) would fail to parse. --- .dprint.json | 4 ++-- Cargo.lock | 8 ++++---- cli/Cargo.toml | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.dprint.json b/.dprint.json index d20b1673ba..b8af9469f4 100644 --- a/.dprint.json +++ b/.dprint.json @@ -51,8 +51,8 @@ "tools/wpt/manifest.json" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.84.0.wasm", - "https://plugins.dprint.dev/json-0.17.0.wasm", + "https://plugins.dprint.dev/typescript-0.84.2.wasm", + "https://plugins.dprint.dev/json-0.17.2.wasm", "https://plugins.dprint.dev/markdown-0.15.2.wasm", "https://plugins.dprint.dev/toml-0.5.4.wasm", "https://plugins.dprint.dev/exec-0.3.5.json@d687dda57be0fe9a0088ccdaefa5147649ff24127d8b3ea227536c68ee7abeab" diff --git a/Cargo.lock b/Cargo.lock index 93c7651ec9..99a51ffc47 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1534,9 +1534,9 @@ dependencies = [ [[package]] name = "dprint-plugin-json" -version = "0.17.0" +version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6120aa5613816db2ef2ef539c229c24f4ee3dbba15317242bcf0de5f17a0060" +checksum = "602d5b9cc4657bdf06d27fa6c22dfcfdb85a19ab555f2834cf7b01ba8001a1f6" dependencies = [ "anyhow", "dprint-core", @@ -2596,9 +2596,9 @@ dependencies = [ [[package]] name = "jsonc-parser" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1853e40333206f9a685358046d13ab200169e3ee573019bddf0ede0dc29307" +checksum = 
"7b56a20e76235284255a09fcd1f45cf55d3c524ea657ebd3854735925c57743d" dependencies = [ "serde_json", ] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index ac3c840e2a..319c8cb56c 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -65,7 +65,7 @@ clap_complete_fig = "=4.1.2" console_static_text.workspace = true data-url.workspace = true dissimilar = "=1.0.4" -dprint-plugin-json = "=0.17.0" +dprint-plugin-json = "=0.17.2" dprint-plugin-markdown = "=0.15.2" dprint-plugin-typescript = "=0.84.2" encoding_rs.workspace = true @@ -79,7 +79,7 @@ http.workspace = true hyper.workspace = true import_map = "=0.15.0" indexmap.workspace = true -jsonc-parser = { version = "=0.21.0", features = ["serde"] } +jsonc-parser = { version = "=0.21.1", features = ["serde"] } lazy-regex.workspace = true libc.workspace = true log = { workspace = true, features = ["serde"] } From 9a9473533e70987e922ddd5ff8da915815e37d03 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Sun, 30 Apr 2023 16:51:31 -0400 Subject: [PATCH 089/320] refactor(cli): remove `Clone` on `ProcState` (#18874) Slowly phasing this out. 
--- cli/lsp/testing/execution.rs | 6 ++-- cli/proc_state.rs | 61 +++++-------------------------- cli/tools/bench.rs | 38 ++++++++++---------- cli/tools/repl/mod.rs | 2 +- cli/tools/run.rs | 18 +++++----- cli/tools/test.rs | 69 +++++++++++++++++++++--------------- 6 files changed, 84 insertions(+), 110 deletions(-) diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index b7859ebda4..5dfb310137 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -225,7 +225,9 @@ impl TestRun { let permissions = Permissions::from_options(&ps.options.permissions_options())?; test::check_specifiers( - &ps, + &ps.options, + &ps.file_fetcher, + &ps.module_load_preparer, self .queue .iter() @@ -257,7 +259,7 @@ impl TestRun { let tests: Arc>> = Arc::new(RwLock::new(IndexMap::new())); let mut test_steps = IndexMap::new(); - let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); let join_handles = queue.into_iter().map(move |specifier| { let specifier = specifier.clone(); diff --git a/cli/proc_state.rs b/cli/proc_state.rs index bb8fd9c3e7..321bf45c3f 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -49,17 +49,13 @@ use deno_semver::npm::NpmPackageReqReference; use import_map::ImportMap; use log::warn; use std::collections::HashSet; -use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; -/// This structure represents state of single "deno" program. -/// -/// It is shared by all created workers (thus V8 isolates). -#[derive(Clone)] -pub struct ProcState(Arc); - -pub struct Inner { +/// This structure used to represent state of single "deno" program +/// that was shared by all created workers. It morphed into being the +/// "factory" for all objects, but is being slowly phased out. 
+pub struct ProcState { pub dir: DenoDir, pub caches: Arc, pub file_fetcher: Arc, @@ -87,14 +83,6 @@ pub struct Inner { pub npm_resolution: Arc, pub package_json_deps_installer: Arc, pub cjs_resolutions: Arc, - progress_bar: ProgressBar, -} - -impl Deref for ProcState { - type Target = Arc; - fn deref(&self) -> &Self::Target { - &self.0 - } } impl ProcState { @@ -123,48 +111,18 @@ impl ProcState { /// Reset all runtime state to its default. This should be used on file /// watcher restarts. - pub fn reset_for_file_watcher(&mut self) { + pub fn reset_for_file_watcher(&self) { self.cjs_resolutions.clear(); self.parsed_source_cache.clear(); self.graph_container.clear(); - self.0 = Arc::new(Inner { - dir: self.dir.clone(), - caches: self.caches.clone(), - options: self.options.clone(), - emit_cache: self.emit_cache.clone(), - emitter: self.emitter.clone(), - file_fetcher: self.file_fetcher.clone(), - http_client: self.http_client.clone(), - graph_container: self.graph_container.clone(), - lockfile: self.lockfile.clone(), - maybe_import_map: self.maybe_import_map.clone(), - maybe_inspector_server: self.maybe_inspector_server.clone(), - root_cert_store: self.root_cert_store.clone(), - blob_store: self.blob_store.clone(), - parsed_source_cache: self.parsed_source_cache.clone(), - resolver: self.resolver.clone(), - maybe_file_watcher_reporter: self.maybe_file_watcher_reporter.clone(), - module_graph_builder: self.module_graph_builder.clone(), - module_load_preparer: self.module_load_preparer.clone(), - node_code_translator: self.node_code_translator.clone(), - node_fs: self.node_fs.clone(), - node_resolver: self.node_resolver.clone(), - npm_api: self.npm_api.clone(), - npm_cache: self.npm_cache.clone(), - npm_resolver: self.npm_resolver.clone(), - npm_resolution: self.npm_resolution.clone(), - package_json_deps_installer: self.package_json_deps_installer.clone(), - cjs_resolutions: self.cjs_resolutions.clone(), - progress_bar: self.progress_bar.clone(), - }); 
self.init_watcher(); } // Add invariant files like the import map and explicit watch flag list to // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. fn init_watcher(&self) { - let files_to_watch_sender = match &self.0.maybe_file_watcher_reporter { + let files_to_watch_sender = match &self.maybe_file_watcher_reporter { Some(reporter) => &reporter.sender, None => return, }; @@ -338,7 +296,7 @@ impl ProcState { type_checker, )); - Ok(ProcState(Arc::new(Inner { + Ok(ProcState { dir, caches, options: cli_options, @@ -366,13 +324,12 @@ impl ProcState { package_json_deps_installer, cjs_resolutions: Default::default(), module_load_preparer, - progress_bar, - }))) + }) } // todo(dsherret): this is a transitory method as we separate out // ProcState from more code - pub fn into_cli_main_worker_factory(self) -> CliMainWorkerFactory { + pub fn create_cli_main_worker_factory(&self) -> CliMainWorkerFactory { CliMainWorkerFactory::new( StorageKeyResolver::from_options(&self.options), self.npm_resolver.clone(), diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 88e19dd701..3f606cfa93 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -6,6 +6,7 @@ use crate::args::TypeCheckMode; use crate::colors; use crate::display::write_json_to_stdout; use crate::graph_util::graph_valid_with_cli_options; +use crate::module_loader::ModuleLoadPreparer; use crate::ops; use crate::proc_state::ProcState; use crate::tools::test::format_test_error; @@ -36,7 +37,6 @@ use indexmap::IndexSet; use log::Level; use serde::Deserialize; use serde::Serialize; -use std::cell::RefCell; use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; @@ -418,11 +418,12 @@ impl BenchReporter for ConsoleReporter { /// Type check a collection of module and document specifiers. 
async fn check_specifiers( - ps: &ProcState, + cli_options: &CliOptions, + module_load_preparer: &ModuleLoadPreparer, specifiers: Vec, ) -> Result<(), AnyError> { - let lib = ps.options.ts_type_lib_window(); - ps.module_load_preparer + let lib = cli_options.ts_type_lib_window(); + module_load_preparer .prepare_module_load( specifiers, false, @@ -648,14 +649,15 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers(&ps, specifiers.clone()).await?; + check_specifiers(&ps.options, &ps.module_load_preparer, specifiers.clone()) + .await?; if bench_options.no_run { return Ok(()); } let log_level = ps.options.log_level(); - let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); bench_specifiers( worker_factory, &permissions, @@ -684,14 +686,13 @@ pub async fn run_benchmarks_with_watch( Permissions::from_options(&ps.options.permissions_options())?; let no_check = ps.options.type_check_mode() == TypeCheckMode::None; - let ps = RefCell::new(ps); - let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let bench_options = &bench_options; - let ps = ps.borrow().clone(); + let module_graph_builder = ps.module_graph_builder.clone(); + let cli_options = ps.options.clone(); async move { let bench_modules = @@ -703,11 +704,10 @@ pub async fn run_benchmarks_with_watch( } else { bench_modules.clone() }; - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph(bench_modules.clone()) .await?; - graph_valid_with_cli_options(&graph, &bench_modules, &ps.options)?; + graph_valid_with_cli_options(&graph, &bench_modules, &cli_options)?; // TODO(@kitsonk) - This should be totally derivable from the graph. 
for specifier in bench_modules { @@ -800,8 +800,10 @@ pub async fn run_benchmarks_with_watch( let operation = |modules_to_reload: Vec| { let permissions = &permissions; let bench_options = &bench_options; - ps.borrow_mut().reset_for_file_watcher(); - let ps = ps.borrow().clone(); + ps.reset_for_file_watcher(); + let module_load_preparer = ps.module_load_preparer.clone(); + let cli_options = ps.options.clone(); + let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); async move { let specifiers = @@ -810,14 +812,14 @@ pub async fn run_benchmarks_with_watch( .filter(|specifier| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, specifiers.clone()).await?; + check_specifiers(&cli_options, &module_load_preparer, specifiers.clone()) + .await?; if bench_options.no_run { return Ok(()); } - let log_level = ps.options.log_level(); - let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + let log_level = cli_options.log_level(); bench_specifiers( worker_factory, permissions, @@ -834,7 +836,7 @@ pub async fn run_benchmarks_with_watch( } }; - let clear_screen = !ps.borrow().options.no_clear_screen(); + let clear_screen = !ps.options.no_clear_screen(); file_watcher::watch_func( resolver, operation, diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index 59b79ce86c..f0faf74ec1 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -108,7 +108,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { let resolver = ps.resolver.clone(); let dir = ps.dir.clone(); let file_fetcher = ps.file_fetcher.clone(); - let worker_factory = ps.into_cli_main_worker_factory(); + let worker_factory = ps.create_cli_main_worker_factory(); let mut worker = worker_factory .create_main_worker(main_module, permissions) diff --git a/cli/tools/run.rs b/cli/tools/run.rs index 6515ebde69..e1dc529bc2 100644 --- a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -47,7 +47,7 @@ To grant permissions, set them before the script 
argument. For example: let permissions = PermissionsContainer::new(Permissions::from_options( &ps.options.permissions_options(), )?); - let worker_factory = ps.into_cli_main_worker_factory(); + let worker_factory = ps.create_cli_main_worker_factory(); let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -78,7 +78,7 @@ pub async fn run_from_stdin(flags: Flags) -> Result { // to allow module access by TS compiler ps.file_fetcher.insert_cached(source_file); - let worker_factory = ps.into_cli_main_worker_factory(); + let worker_factory = ps.create_cli_main_worker_factory(); let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -90,19 +90,19 @@ pub async fn run_from_stdin(flags: Flags) -> Result { // code properly. async fn run_with_watch(flags: Flags) -> Result { let (sender, receiver) = tokio::sync::mpsc::unbounded_channel(); - let mut ps = + let ps = ProcState::from_flags_for_file_watcher(flags, sender.clone()).await?; let clear_screen = !ps.options.no_clear_screen(); let main_module = ps.options.resolve_main_module()?; let operation = |main_module: ModuleSpecifier| { ps.reset_for_file_watcher(); - let ps = ps.clone(); + let permissions = PermissionsContainer::new(Permissions::from_options( + &ps.options.permissions_options(), + )?); + let worker_factory = ps.create_cli_main_worker_factory(); + Ok(async move { - let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), - )?); - let worker_factory = ps.into_cli_main_worker_factory(); let worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -157,7 +157,7 @@ pub async fn eval_command( ps.file_fetcher.insert_cached(file); let mut worker = ps - .into_cli_main_worker_factory() + .create_cli_main_worker_factory() .create_main_worker(main_module, permissions) .await?; let exit_code = worker.run().await?; diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 
0bdcb88606..429bee71b9 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -7,7 +7,9 @@ use crate::args::TypeCheckMode; use crate::colors; use crate::display; use crate::file_fetcher::File; +use crate::file_fetcher::FileFetcher; use crate::graph_util::graph_valid_with_cli_options; +use crate::module_loader::ModuleLoadPreparer; use crate::ops; use crate::proc_state::ProcState; use crate::util::checksum; @@ -49,7 +51,6 @@ use rand::seq::SliceRandom; use rand::SeedableRng; use regex::Regex; use serde::Deserialize; -use std::cell::RefCell; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::HashMap; @@ -1200,13 +1201,13 @@ fn extract_files_from_fenced_blocks( } async fn fetch_inline_files( - ps: &ProcState, + file_fetcher: &FileFetcher, specifiers: Vec, ) -> Result, AnyError> { let mut files = Vec::new(); for specifier in specifiers { let fetch_permissions = PermissionsContainer::allow_all(); - let file = ps.file_fetcher.fetch(&specifier, fetch_permissions).await?; + let file = file_fetcher.fetch(&specifier, fetch_permissions).await?; let inline_files = if file.media_type == MediaType::Unknown { extract_files_from_fenced_blocks( @@ -1230,12 +1231,14 @@ async fn fetch_inline_files( /// Type check a collection of module and document specifiers. 
pub async fn check_specifiers( - ps: &ProcState, + cli_options: &CliOptions, + file_fetcher: &FileFetcher, + module_load_preparer: &ModuleLoadPreparer, specifiers: Vec<(ModuleSpecifier, TestMode)>, ) -> Result<(), AnyError> { - let lib = ps.options.ts_type_lib_window(); + let lib = cli_options.ts_type_lib_window(); let inline_files = fetch_inline_files( - ps, + file_fetcher, specifiers .iter() .filter_map(|(specifier, mode)| { @@ -1256,10 +1259,10 @@ pub async fn check_specifiers( .collect(); for file in inline_files { - ps.file_fetcher.insert_cached(file); + file_fetcher.insert_cached(file); } - ps.module_load_preparer + module_load_preparer .prepare_module_load( specifiers, false, @@ -1280,7 +1283,7 @@ pub async fn check_specifiers( }) .collect(); - ps.module_load_preparer + module_load_preparer .prepare_module_load( module_specifiers, false, @@ -1601,15 +1604,14 @@ fn collect_specifiers_with_test_mode( /// cannot be run, and therefore need to be marked as `TestMode::Documentation` /// as well. 
async fn fetch_specifiers_with_test_mode( - ps: &ProcState, + file_fetcher: &FileFetcher, files: &FilesConfig, doc: &bool, ) -> Result, AnyError> { let mut specifiers_with_mode = collect_specifiers_with_test_mode(files, doc)?; for (specifier, mode) in &mut specifiers_with_mode { - let file = ps - .file_fetcher + let file = file_fetcher .fetch(specifier, PermissionsContainer::allow_all()) .await?; @@ -1636,7 +1638,7 @@ pub async fn run_tests( let log_level = ps.options.log_level(); let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps, + &ps.file_fetcher, &test_options.files, &test_options.doc, ) @@ -1646,13 +1648,19 @@ pub async fn run_tests( return Err(generic_error("No test modules found")); } - check_specifiers(&ps, specifiers_with_mode.clone()).await?; + check_specifiers( + &ps.options, + &ps.file_fetcher, + &ps.module_load_preparer, + specifiers_with_mode.clone(), + ) + .await?; if test_options.no_run { return Ok(()); } - let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); + let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); test_specifiers( worker_factory, @@ -1693,14 +1701,13 @@ pub async fn run_tests_with_watch( let no_check = ps.options.type_check_mode() == TypeCheckMode::None; let log_level = ps.options.log_level(); - let ps = RefCell::new(ps); - let resolver = |changed: Option>| { let paths_to_watch = test_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let test_options = &test_options; - let ps = ps.borrow().clone(); + let cli_options = ps.options.clone(); + let module_graph_builder = ps.module_graph_builder.clone(); async move { let test_modules = if test_options.doc { @@ -1715,11 +1722,10 @@ pub async fn run_tests_with_watch( } else { test_modules.clone() }; - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph(test_modules.clone()) .await?; - graph_valid_with_cli_options(&graph, &test_modules, 
&ps.options)?; + graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?; // TODO(@kitsonk) - This should be totally derivable from the graph. for specifier in test_modules { @@ -1812,12 +1818,15 @@ pub async fn run_tests_with_watch( let operation = |modules_to_reload: Vec| { let permissions = &permissions; let test_options = &test_options; - ps.borrow_mut().reset_for_file_watcher(); - let ps = ps.borrow().clone(); + ps.reset_for_file_watcher(); + let cli_options = ps.options.clone(); + let file_fetcher = ps.file_fetcher.clone(); + let module_load_preparer = ps.module_load_preparer.clone(); + let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); async move { let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps, + &file_fetcher, &test_options.files, &test_options.doc, ) @@ -1826,14 +1835,18 @@ pub async fn run_tests_with_watch( .filter(|(specifier, _)| modules_to_reload.contains(specifier)) .collect::>(); - check_specifiers(&ps, specifiers_with_mode.clone()).await?; + check_specifiers( + &cli_options, + &file_fetcher, + &module_load_preparer, + specifiers_with_mode.clone(), + ) + .await?; if test_options.no_run { return Ok(()); } - let worker_factory = Arc::new(ps.into_cli_main_worker_factory()); - test_specifiers( worker_factory, permissions, @@ -1874,7 +1887,7 @@ pub async fn run_tests_with_watch( } }); - let clear_screen = !ps.borrow().options.no_clear_screen(); + let clear_screen = !ps.options.no_clear_screen(); file_watcher::watch_func( resolver, operation, From d856bfd336137e1bcf81a0db9e8ad2b418ba711e Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 1 May 2023 10:39:30 +0530 Subject: [PATCH 090/320] perf(ext/web): fast path for ws events (#18905) - Do not use `ReflectHas` in `isNode`. 
- Avoid copying handler array when handlers.length == 1 - Avoid searching for path target when path.length == 1 ``` Linux divy-2 5.19.0-1022-gcp #24~22.04.1-Ubuntu SMP Sun Apr 23 09:51:08 UTC 2023 x86_64 x86_64 x86_64 GNU/Linux 32GiB System memory Intel(R) Xeon(R) CPU @ 3.10GHz # main + https://github.com/denoland/deno/pull/18904 Msg/sec: 89326.750000 Msg/sec: 90320.000000 Msg/sec: 89576.250000 # this patch Msg/sec: 97250.000000 Msg/sec: 97125.500000 Msg/sec: 97964.500000 ``` --- ...event_listener_error_immediate_exit.ts.out | 4 ---- ...istener_error_immediate_exit_worker.ts.out | 6 +---- ext/web/02_event.js | 23 ++++++++++++++----- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out b/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out index 2b1b264449..8f03f71b81 100644 --- a/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out +++ b/cli/tests/testdata/run/event_listener_error_immediate_exit.ts.out @@ -3,8 +3,4 @@ error: Uncaught Error: bar throw new Error("bar"); ^ at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9[WILDCARD] - at innerInvokeEventListeners (ext:deno_web/02_event.js:785:7) - at invokeEventListeners (ext:deno_web/02_event.js:825:5) - at dispatch (ext:deno_web/02_event.js:694:9) - at dispatchEvent (ext:deno_web/02_event.js:1086:12) at [WILDCARD]/event_listener_error_immediate_exit.ts:11:1 diff --git a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out index 8ab76d6cf2..85b52190bf 100644 --- a/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out +++ b/cli/tests/testdata/run/event_listener_error_immediate_exit_worker.ts.out @@ -2,11 +2,7 @@ error: Uncaught (in worker "") Error: bar throw new Error("bar"); ^ - at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9 - at innerInvokeEventListeners (ext:deno_web/02_event.js:785:7) 
- at invokeEventListeners (ext:deno_web/02_event.js:825:5) - at dispatch (ext:deno_web/02_event.js:694:9) - at dispatchEvent (ext:deno_web/02_event.js:1086:12) + at [WILDCARD]/event_listener_error_immediate_exit.ts:4:9[WILDCARD] at [WILDCARD]/event_listener_error_immediate_exit.ts:11:1 error: Uncaught (in promise) Error: Unhandled error in child worker. at [WILDCARD] diff --git a/ext/web/02_event.js b/ext/web/02_event.js index 6a5db3464c..0372293cff 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -482,7 +482,7 @@ function getRoot(eventTarget) { function isNode( eventTarget, ) { - return Boolean(eventTarget && ReflectHas(eventTarget, "nodeType")); + return eventTarget?.nodeType !== undefined; } // https://dom.spec.whatwg.org/#concept-shadow-including-inclusive-ancestor @@ -734,8 +734,12 @@ function innerInvokeEventListeners( return found; } + let handlers = targetListeners[type]; + // Copy event listeners before iterating since the list can be modified during the iteration. 
- const handlers = ArrayPrototypeSlice(targetListeners[type]); + if (handlers.length > 1) { + handlers = ArrayPrototypeSlice(targetListeners[type]); + } for (let i = 0; i < handlers.length; i++) { const listener = handlers[i]; @@ -804,12 +808,19 @@ function innerInvokeEventListeners( * Ref: https://dom.spec.whatwg.org/#concept-event-listener-invoke */ function invokeEventListeners(tuple, eventImpl) { const path = getPath(eventImpl); - const tupleIndex = ArrayPrototypeIndexOf(path, tuple); - for (let i = tupleIndex; i >= 0; i--) { - const t = path[i]; + if (path.length === 1) { + const t = path[0]; if (t.target) { setTarget(eventImpl, t.target); - break; + } + } else { + const tupleIndex = ArrayPrototypeIndexOf(path, tuple); + for (let i = tupleIndex; i >= 0; i--) { + const t = path[i]; + if (t.target) { + setTarget(eventImpl, t.target); + break; + } } } From b31cf9fde6ad5398c20370c136695db77df6beeb Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 1 May 2023 12:47:13 +0200 Subject: [PATCH 091/320] refactor(webidl): move prefix & context out of converters options bag (#18931) --- ext/broadcast_channel/01_broadcast_channel.js | 5 +- ext/cache/01_cache.js | 41 +- ext/crypto/00_crypto.js | 290 ++++++-------- ext/crypto/01_webidl.js | 80 ++-- ext/fetch/20_headers.js | 60 ++- ext/fetch/21_formdata.js | 72 ++-- ext/fetch/22_body.js | 16 +- ext/fetch/23_request.js | 18 +- ext/fetch/23_response.js | 34 +- ext/fetch/26_fetch.js | 9 +- ext/url/00_url.js | 125 ++---- ext/url/01_urlpattern.js | 36 +- ext/web/01_dom_exception.js | 18 +- ext/web/02_event.js | 31 +- ext/web/03_abort_signal.js | 11 +- ext/web/05_base64.js | 10 +- ext/web/06_streams.js | 157 ++++---- ext/web/08_text_encoding.js | 63 ++- ext/web/09_file.js | 58 ++- ext/web/10_filereader.js | 5 +- ext/web/11_blob_url.js | 10 +- ext/web/13_message_port.js | 16 +- ext/web/14_compression.js | 20 +- ext/web/15_performance.js | 103 ++--- ext/webidl/00_webidl.js | 234 ++++++----- ext/webidl/internal.d.ts | 363 
++++++++++++++---- ext/websocket/01_websocket.js | 53 ++- ext/websocket/02_websocketstream.js | 21 +- ext/webstorage/01_webstorage.js | 25 +- runtime/js/11_workers.js | 11 +- runtime/js/99_main.js | 11 +- 31 files changed, 1042 insertions(+), 964 deletions(-) diff --git a/ext/broadcast_channel/01_broadcast_channel.js b/ext/broadcast_channel/01_broadcast_channel.js index a9a7eb9912..d92aef0728 100644 --- a/ext/broadcast_channel/01_broadcast_channel.js +++ b/ext/broadcast_channel/01_broadcast_channel.js @@ -85,10 +85,7 @@ class BroadcastChannel extends EventTarget { const prefix = "Failed to construct 'BroadcastChannel'"; webidl.requiredArguments(arguments.length, 1, prefix); - this[_name] = webidl.converters["DOMString"](name, { - prefix, - context: "Argument 1", - }); + this[_name] = webidl.converters["DOMString"](name, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; diff --git a/ext/cache/01_cache.js b/ext/cache/01_cache.js index 48651dfb69..8cbf540fa7 100644 --- a/ext/cache/01_cache.js +++ b/ext/cache/01_cache.js @@ -27,10 +27,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'open' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); const cacheId = await core.opAsync("op_cache_storage_open", cacheName); const cache = webidl.createBranded(Cache); cache[_id] = cacheId; @@ -41,10 +38,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'has' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); return await core.opAsync("op_cache_storage_has", 
cacheName); } @@ -52,10 +46,7 @@ class CacheStorage { webidl.assertBranded(this, CacheStoragePrototype); const prefix = "Failed to execute 'delete' on 'CacheStorage'"; webidl.requiredArguments(arguments.length, 1, prefix); - cacheName = webidl.converters["DOMString"](cacheName, { - prefix, - context: "Argument 1", - }); + cacheName = webidl.converters["DOMString"](cacheName, prefix, "Argument 1"); return await core.opAsync("op_cache_storage_delete", cacheName); } } @@ -76,14 +67,12 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'put' on 'Cache'"; webidl.requiredArguments(arguments.length, 2, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); - response = webidl.converters["Response"](response, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + response = webidl.converters["Response"](response, prefix, "Argument 2"); // Step 1. let innerRequest = null; // Step 2. @@ -166,10 +155,11 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'match' on 'Cache'"; webidl.requiredArguments(arguments.length, 1, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); + "Argument 1", + ); const p = await this[_matchAll](request, options); if (p.length > 0) { return p[0]; @@ -183,10 +173,11 @@ class Cache { webidl.assertBranded(this, CachePrototype); const prefix = "Failed to execute 'delete' on 'Cache'"; webidl.requiredArguments(arguments.length, 1, prefix); - request = webidl.converters["RequestInfo_DOMString"](request, { + request = webidl.converters["RequestInfo_DOMString"]( + request, prefix, - context: "Argument 1", - }); + "Argument 1", + ); // Step 1. let r = null; // Step 2. 
diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 4d54b52a9d..2208124f6b 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -199,10 +199,11 @@ function normalizeAlgorithm(algorithm, op) { // 1. const registeredAlgorithms = supportedAlgorithms[op]; // 2. 3. - const initialAlg = webidl.converters.Algorithm(algorithm, { - prefix: "Failed to normalize algorithm", - context: "passed algorithm", - }); + const initialAlg = webidl.converters.Algorithm( + algorithm, + "Failed to normalize algorithm", + "passed algorithm", + ); // 4. let algName = initialAlg.name; @@ -232,10 +233,11 @@ function normalizeAlgorithm(algorithm, op) { } // 6. - const normalizedAlgorithm = webidl.converters[desiredType](algorithm, { - prefix: "Failed to normalize algorithm", - context: "passed algorithm", - }); + const normalizedAlgorithm = webidl.converters[desiredType]( + algorithm, + "Failed to normalize algorithm", + "passed algorithm", + ); // 7. normalizedAlgorithm.name = algName; @@ -469,14 +471,12 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'digest' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 2, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + data = webidl.converters.BufferSource(data, prefix, "Argument 2"); data = copyBuffer(data); @@ -501,18 +501,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'encrypt' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = 
webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 2. data = copyBuffer(data); @@ -549,18 +544,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'decrypt' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 2. data = copyBuffer(data); @@ -757,18 +747,13 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'sign' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 3", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + data = webidl.converters.BufferSource(data, prefix, "Argument 3"); // 1. 
data = copyBuffer(data); @@ -921,26 +906,23 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'importKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + keyData = webidl.converters["BufferSource or JsonWebKey"]( + keyData, prefix, - context: "Argument 1", - }); - keyData = webidl.converters["BufferSource or JsonWebKey"](keyData, { + "Argument 2", + ); + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 2", - }); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + "Argument 3", + ); + extractable = webidl.converters.boolean(extractable, prefix, "Argument 4"); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 3", - }); - extractable = webidl.converters.boolean(extractable, { - prefix, - context: "Argument 4", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 5", - }); + "Argument 5", + ); // 2. if (format !== "jwk") { @@ -1055,14 +1037,8 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'exportKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 2, prefix); - format = webidl.converters.KeyFormat(format, { - prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); const handle = key[_handle]; // 2. 
@@ -1127,19 +1103,14 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'deriveBits' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - baseKey = webidl.converters.CryptoKey(baseKey, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + baseKey = webidl.converters.CryptoKey(baseKey, prefix, "Argument 2"); if (length !== null) { - length = webidl.converters["unsigned long"](length, { - prefix, - context: "Argument 3", - }); + length = webidl.converters["unsigned long"](length, prefix, "Argument 3"); } // 2. @@ -1177,26 +1148,27 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'deriveKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 5, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - baseKey = webidl.converters.CryptoKey(baseKey, { + "Argument 1", + ); + baseKey = webidl.converters.CryptoKey(baseKey, prefix, "Argument 2"); + derivedKeyType = webidl.converters.AlgorithmIdentifier( + derivedKeyType, prefix, - context: "Argument 2", - }); - derivedKeyType = webidl.converters.AlgorithmIdentifier(derivedKeyType, { + "Argument 3", + ); + extractable = webidl.converters["boolean"]( + extractable, prefix, - context: "Argument 3", - }); - extractable = webidl.converters["boolean"](extractable, { + "Argument 4", + ); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 4", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 5", - }); + "Argument 5", + ); // 2-3. 
const normalizedAlgorithm = normalizeAlgorithm(algorithm, "deriveBits"); @@ -1272,22 +1244,14 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'verify' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { - prefix, - context: "Argument 2", - }); - signature = webidl.converters.BufferSource(signature, { - prefix, - context: "Argument 3", - }); - data = webidl.converters.BufferSource(data, { - prefix, - context: "Argument 4", - }); + "Argument 1", + ); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + signature = webidl.converters.BufferSource(signature, prefix, "Argument 3"); + data = webidl.converters.BufferSource(data, prefix, "Argument 4"); // 2. signature = copyBuffer(signature); @@ -1412,22 +1376,18 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'wrapKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 4, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + key = webidl.converters.CryptoKey(key, prefix, "Argument 2"); + wrappingKey = webidl.converters.CryptoKey( + wrappingKey, prefix, - context: "Argument 1", - }); - key = webidl.converters.CryptoKey(key, { + "Argument 3", + ); + wrapAlgorithm = webidl.converters.AlgorithmIdentifier( + wrapAlgorithm, prefix, - context: "Argument 2", - }); - wrappingKey = webidl.converters.CryptoKey(wrappingKey, { - prefix, - context: "Argument 3", - }); - wrapAlgorithm = webidl.converters.AlgorithmIdentifier(wrapAlgorithm, { - prefix, - context: "Argument 4", - }); + "Argument 4", + ); let normalizedAlgorithm; @@ -1548,37 +1508,33 @@ class SubtleCrypto { 
webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'unwrapKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 7, prefix); - format = webidl.converters.KeyFormat(format, { + format = webidl.converters.KeyFormat(format, prefix, "Argument 1"); + wrappedKey = webidl.converters.BufferSource( + wrappedKey, prefix, - context: "Argument 1", - }); - wrappedKey = webidl.converters.BufferSource(wrappedKey, { + "Argument 2", + ); + unwrappingKey = webidl.converters.CryptoKey( + unwrappingKey, prefix, - context: "Argument 2", - }); - unwrappingKey = webidl.converters.CryptoKey(unwrappingKey, { + "Argument 3", + ); + unwrapAlgorithm = webidl.converters.AlgorithmIdentifier( + unwrapAlgorithm, prefix, - context: "Argument 3", - }); - unwrapAlgorithm = webidl.converters.AlgorithmIdentifier(unwrapAlgorithm, { - prefix, - context: "Argument 4", - }); + "Argument 4", + ); unwrappedKeyAlgorithm = webidl.converters.AlgorithmIdentifier( unwrappedKeyAlgorithm, - { - prefix, - context: "Argument 5", - }, + prefix, + "Argument 5", ); - extractable = webidl.converters.boolean(extractable, { + extractable = webidl.converters.boolean(extractable, prefix, "Argument 6"); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 6", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { - prefix, - context: "Argument 7", - }); + "Argument 7", + ); // 2. 
wrappedKey = copyBuffer(wrappedKey); @@ -1709,18 +1665,21 @@ class SubtleCrypto { webidl.assertBranded(this, SubtleCryptoPrototype); const prefix = "Failed to execute 'generateKey' on 'SubtleCrypto'"; webidl.requiredArguments(arguments.length, 3, prefix); - algorithm = webidl.converters.AlgorithmIdentifier(algorithm, { + algorithm = webidl.converters.AlgorithmIdentifier( + algorithm, prefix, - context: "Argument 1", - }); - extractable = webidl.converters["boolean"](extractable, { + "Argument 1", + ); + extractable = webidl.converters["boolean"]( + extractable, prefix, - context: "Argument 2", - }); - keyUsages = webidl.converters["sequence"](keyUsages, { + "Argument 2", + ); + keyUsages = webidl.converters["sequence"]( + keyUsages, prefix, - context: "Argument 3", - }); + "Argument 3", + ); const usages = keyUsages; @@ -4722,10 +4681,11 @@ class Crypto { ops.op_crypto_get_random_values(typedArray); return typedArray; } - typedArray = webidl.converters.ArrayBufferView(typedArray, { + typedArray = webidl.converters.ArrayBufferView( + typedArray, prefix, - context: "Argument 1", - }); + "Argument 1", + ); switch (tag) { case "Int8Array": case "Uint8ClampedArray": diff --git a/ext/crypto/01_webidl.js b/ext/crypto/01_webidl.js index 715e5a9773..cd0655b3b8 100644 --- a/ext/crypto/01_webidl.js +++ b/ext/crypto/01_webidl.js @@ -14,23 +14,28 @@ const { SafeArrayIterator, } = primordials; -webidl.converters.AlgorithmIdentifier = (V, opts) => { +webidl.converters.AlgorithmIdentifier = (V, prefix, context, opts) => { // Union for (object or DOMString) if (webidl.type(V) == "Object") { - return webidl.converters.object(V, opts); + return webidl.converters.object(V, prefix, context, opts); } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; -webidl.converters["BufferSource or JsonWebKey"] = (V, opts) => { +webidl.converters["BufferSource or JsonWebKey"] = ( + V, + prefix, + context, + opts, +) => { // Union for 
(BufferSource or JsonWebKey) if ( ArrayBufferIsView(V) || ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) ) { - return webidl.converters.BufferSource(V, opts); + return webidl.converters.BufferSource(V, prefix, context, opts); } - return webidl.converters.JsonWebKey(V, opts); + return webidl.converters.JsonWebKey(V, prefix, context, opts); }; webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ @@ -81,8 +86,11 @@ const dictRsaKeyGenParams = [ ...new SafeArrayIterator(dictAlgorithm), { key: "modulusLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, { @@ -155,8 +163,11 @@ const dictAesKeyGenParams = [ ...new SafeArrayIterator(dictAlgorithm), { key: "length", - converter: (V, opts) => - webidl.converters["unsigned short"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, ]; @@ -173,8 +184,11 @@ const dictHmacKeyGenParams = [ }, { key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), }, ]; @@ -185,8 +199,11 @@ const dictRsaPssParams = [ ...new SafeArrayIterator(dictAlgorithm), { key: "saltLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, ]; @@ -226,8 +243,11 @@ const dictHmacImportParams = [ }, { key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, 
{ ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), }, ]; @@ -374,8 +394,11 @@ const dictPbkdf2Params = [ }, { key: "iterations", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, { @@ -392,8 +415,11 @@ const dictAesDerivedKeyParams = [ ...new SafeArrayIterator(dictAlgorithm), { key: "length", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, ]; @@ -416,8 +442,11 @@ const dictAesGcmParams = [ }, { key: "tagLength", - converter: (V, opts) => - webidl.converters["unsigned long"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), }, { key: "additionalData", @@ -434,8 +463,11 @@ const dictAesCtrParams = [ }, { key: "length", - converter: (V, opts) => - webidl.converters["unsigned short"](V, { ...opts, enforceRange: true }), + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), required: true, }, ]; diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 29b4540f94..7ec6751fae 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -251,10 +251,7 @@ class Headers { constructor(init = undefined) { const prefix = "Failed to construct 'Headers'"; if (init !== undefined) { - init = webidl.converters["HeadersInit"](init, { - prefix, - context: "Argument 1", - }); + init = 
webidl.converters["HeadersInit"](init, prefix, "Argument 1"); } this[webidl.brand] = webidl.brand; @@ -272,14 +269,8 @@ class Headers { webidl.assertBranded(this, HeadersPrototype); const prefix = "Failed to execute 'append' on 'Headers'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters["ByteString"](value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); + value = webidl.converters["ByteString"](value, prefix, "Argument 2"); appendHeader(this, name, value); } @@ -289,10 +280,7 @@ class Headers { delete(name) { const prefix = "Failed to execute 'delete' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { throw new TypeError("Header name is not valid."); @@ -317,10 +305,7 @@ class Headers { get(name) { const prefix = "Failed to execute 'get' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { throw new TypeError("Header name is not valid."); @@ -336,10 +321,7 @@ class Headers { has(name) { const prefix = "Failed to execute 'has' on 'Headers'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { throw new TypeError("Header name is not valid."); @@ -363,14 +345,8 @@ class Headers { 
webidl.assertBranded(this, HeadersPrototype); const prefix = "Failed to execute 'set' on 'Headers'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["ByteString"](name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters["ByteString"](value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters["ByteString"](name, prefix, "Argument 1"); + value = webidl.converters["ByteString"](value, prefix, "Argument 2"); value = normalizeHeaderValue(value); @@ -420,19 +396,29 @@ webidl.mixinPairIterable("Headers", Headers, _iterableHeaders, 0, 1); webidl.configurePrototype(Headers); const HeadersPrototype = Headers.prototype; -webidl.converters["HeadersInit"] = (V, opts) => { +webidl.converters["HeadersInit"] = (V, prefix, context, opts) => { // Union for (sequence> or record) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence>"](V, opts); + return webidl.converters["sequence>"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters["record"](V, opts); + return webidl.converters["record"]( + V, + prefix, + context, + opts, + ); } throw webidl.makeException( TypeError, "The provided value is not of type '(sequence> or record)'", - opts.prefix, - opts.context, + prefix, + context, ); }; webidl.converters["Headers"] = webidl.createInterfaceConverter( diff --git a/ext/fetch/21_formdata.js b/ext/fetch/21_formdata.js index 1961643d2e..330ed92e6e 100644 --- a/ext/fetch/21_formdata.js +++ b/ext/fetch/21_formdata.js @@ -103,26 +103,26 @@ class FormData { const prefix = "Failed to execute 'append' on 'FormData'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); if (ObjectPrototypeIsPrototypeOf(BlobPrototype, valueOrBlobValue)) { - valueOrBlobValue = 
webidl.converters["Blob"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["Blob"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (filename !== undefined) { - filename = webidl.converters["USVString"](filename, { + filename = webidl.converters["USVString"]( + filename, prefix, - context: "Argument 3", - }); + "Argument 3", + ); } } else { - valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["USVString"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } const entry = createEntry(name, valueOrBlobValue, filename); @@ -139,10 +139,7 @@ class FormData { const prefix = "Failed to execute 'name' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const list = this[entryList]; for (let i = 0; i < list.length; i++) { @@ -162,10 +159,7 @@ class FormData { const prefix = "Failed to execute 'get' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const entries = this[entryList]; for (let i = 0; i < entries.length; ++i) { @@ -184,10 +178,7 @@ class FormData { const prefix = "Failed to execute 'getAll' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const returnList = []; const entries = this[entryList]; @@ -207,10 +198,7 @@ class FormData { const prefix = "Failed to execute 'has' on 'FormData'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters["USVString"](name, { - prefix, - 
context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); const entries = this[entryList]; for (let i = 0; i < entries.length; ++i) { @@ -231,26 +219,26 @@ class FormData { const prefix = "Failed to execute 'set' on 'FormData'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters["USVString"](name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters["USVString"](name, prefix, "Argument 1"); if (ObjectPrototypeIsPrototypeOf(BlobPrototype, valueOrBlobValue)) { - valueOrBlobValue = webidl.converters["Blob"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["Blob"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (filename !== undefined) { - filename = webidl.converters["USVString"](filename, { + filename = webidl.converters["USVString"]( + filename, prefix, - context: "Argument 3", - }); + "Argument 3", + ); } } else { - valueOrBlobValue = webidl.converters["USVString"](valueOrBlobValue, { + valueOrBlobValue = webidl.converters["USVString"]( + valueOrBlobValue, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } const entry = createEntry(name, valueOrBlobValue, filename); diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 875ec0620f..82703af761 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -448,16 +448,16 @@ function extractBody(object) { return { body, contentType }; } -webidl.converters["BodyInit_DOMString"] = (V, opts) => { +webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => { // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString) if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, V)) { - return webidl.converters["ReadableStream"](V, opts); + return webidl.converters["ReadableStream"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, 
opts); + return webidl.converters["Blob"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(FormDataPrototype, V)) { - return webidl.converters["FormData"](V, opts); + return webidl.converters["FormData"](V, prefix, context, opts); } else if (ObjectPrototypeIsPrototypeOf(URLSearchParamsPrototype, V)) { - return webidl.converters["URLSearchParams"](V, opts); + return webidl.converters["URLSearchParams"](V, prefix, context, opts); } if (typeof V === "object") { if ( @@ -465,16 +465,16 @@ webidl.converters["BodyInit_DOMString"] = (V, opts) => { // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } // BodyInit conversion is passed to extractBody(), which calls core.encode(). // core.encode() will UTF-8 encode strings with replacement, being equivalent to the USV normalization. // Therefore we can convert to DOMString instead of USVString and avoid a costly redundant conversion. 
- return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["BodyInit_DOMString?"] = webidl.createNullableConverter( webidl.converters["BodyInit_DOMString"], diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 22c1f83544..d3132dc625 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -274,14 +274,12 @@ class Request { constructor(input, init = {}) { const prefix = "Failed to construct 'Request'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters["RequestInfo_DOMString"](input, { + input = webidl.converters["RequestInfo_DOMString"]( + input, prefix, - context: "Argument 1", - }); - init = webidl.converters["RequestInit"](init, { - prefix, - context: "Argument 2", - }); + "Argument 1", + ); + init = webidl.converters["RequestInit"](init, prefix, "Argument 2"); this[webidl.brand] = webidl.brand; @@ -501,15 +499,15 @@ webidl.converters["Request"] = webidl.createInterfaceConverter( "Request", RequestPrototype, ); -webidl.converters["RequestInfo_DOMString"] = (V, opts) => { +webidl.converters["RequestInfo_DOMString"] = (V, prefix, context, opts) => { // Union for (Request or USVString) if (typeof V == "object") { if (ObjectPrototypeIsPrototypeOf(RequestPrototype, V)) { - return webidl.converters["Request"](V, opts); + return webidl.converters["Request"](V, prefix, context, opts); } } // Passed to new URL(...) 
which implicitly converts DOMString -> USVString - return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["RequestRedirect"] = webidl.createEnumConverter( "RequestRedirect", diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index 86799252b2..52ebc91fe6 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -257,14 +257,8 @@ class Response { */ static redirect(url, status = 302) { const prefix = "Failed to call 'Response.redirect'"; - url = webidl.converters["USVString"](url, { - prefix, - context: "Argument 1", - }); - status = webidl.converters["unsigned short"](status, { - prefix, - context: "Argument 2", - }); + url = webidl.converters["USVString"](url, prefix, "Argument 1"); + status = webidl.converters["unsigned short"](status, prefix, "Argument 2"); const baseURL = getLocationHref(); const parsedURL = new URL(url, baseURL); @@ -291,10 +285,7 @@ class Response { static json(data = undefined, init = {}) { const prefix = "Failed to call 'Response.json'"; data = webidl.converters.any(data); - init = webidl.converters["ResponseInit_fast"](init, { - prefix, - context: "Argument 2", - }); + init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); const str = serializeJSValueToJSONString(data); const res = extractBody(str); @@ -315,14 +306,8 @@ class Response { */ constructor(body = null, init = undefined) { const prefix = "Failed to construct 'Response'"; - body = webidl.converters["BodyInit_DOMString?"](body, { - prefix, - context: "Argument 1", - }); - init = webidl.converters["ResponseInit_fast"](init, { - prefix, - context: "Argument 2", - }); + body = webidl.converters["BodyInit_DOMString?"](body, prefix, "Argument 1"); + init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); this[_response] = newInnerResponse(); this[_headers] = headersFromHeaderList( @@ -463,7 +448,12 @@ webidl.converters["ResponseInit"] = 
webidl.createDictionaryConverter( converter: webidl.converters["HeadersInit"], }], ); -webidl.converters["ResponseInit_fast"] = function (init, opts) { +webidl.converters["ResponseInit_fast"] = function ( + init, + prefix, + context, + opts, +) { if (init === undefined || init === null) { return { status: 200, statusText: "", headers: undefined }; } @@ -482,7 +472,7 @@ webidl.converters["ResponseInit_fast"] = function (init, opts) { return { status, statusText, headers }; } // Slow default path - return webidl.converters["ResponseInit"](init, opts); + return webidl.converters["ResponseInit"](init, prefix, context, opts); }; /** diff --git a/ext/fetch/26_fetch.js b/ext/fetch/26_fetch.js index 42e1ae962a..5084fab343 100644 --- a/ext/fetch/26_fetch.js +++ b/ext/fetch/26_fetch.js @@ -523,10 +523,11 @@ function handleWasmStreaming(source, rid) { // This implements part of // https://webassembly.github.io/spec/web-api/#compile-a-potential-webassembly-response try { - const res = webidl.converters["Response"](source, { - prefix: "Failed to call 'WebAssembly.compileStreaming'", - context: "Argument 1", - }); + const res = webidl.converters["Response"]( + source, + "Failed to call 'WebAssembly.compileStreaming'", + "Argument 1", + ); // 2.3. 
// The spec is ambiguous here, see diff --git a/ext/url/00_url.js b/ext/url/00_url.js index 7f674e9aa2..169cbe3a51 100644 --- a/ext/url/00_url.js +++ b/ext/url/00_url.js @@ -104,7 +104,8 @@ class URLSearchParams { init = webidl.converters ["sequence> or record or USVString"]( init, - { prefix, context: "Argument 1" }, + prefix, + "Argument 1", ); this[webidl.brand] = webidl.brand; if (!init) { @@ -158,14 +159,8 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'append' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.USVString(value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); + value = webidl.converters.USVString(value, prefix, "Argument 2"); ArrayPrototypePush(this[_list], [name, value]); this.#updateUrlSearch(); } @@ -177,10 +172,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'append' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const list = this[_list]; let i = 0; while (i < list.length) { @@ -201,10 +193,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'getAll' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const values = []; const entries = this[_list]; for (let i = 0; i < entries.length; ++i) { @@ -224,10 +213,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); 
const prefix = "Failed to execute 'get' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); const entries = this[_list]; for (let i = 0; i < entries.length; ++i) { const entry = entries[i]; @@ -246,10 +232,7 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'has' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); return ArrayPrototypeSome(this[_list], (entry) => entry[0] === name); } @@ -261,14 +244,8 @@ class URLSearchParams { webidl.assertBranded(this, URLSearchParamsPrototype); const prefix = "Failed to execute 'set' on 'URLSearchParams'"; webidl.requiredArguments(arguments.length, 2, prefix); - name = webidl.converters.USVString(name, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.USVString(value, { - prefix, - context: "Argument 2", - }); + name = webidl.converters.USVString(name, prefix, "Argument 1"); + value = webidl.converters.USVString(value, prefix, "Argument 2"); const list = this[_list]; @@ -372,12 +349,9 @@ class URL { constructor(url, base = undefined) { const prefix = "Failed to construct 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); + url = webidl.converters.DOMString(url, prefix, "Argument 1"); if (base !== undefined) { - base = webidl.converters.DOMString(base, { - prefix, - context: "Argument 2", - }); + base = webidl.converters.DOMString(base, prefix, "Argument 2"); } this[webidl.brand] = webidl.brand; const status = opUrlParse(url, base); @@ -392,12 +366,9 @@ class URL { static canParse(url, base = undefined) { 
const prefix = "Failed to call 'URL.canParse'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.DOMString(url, { prefix, context: "Argument 1" }); + url = webidl.converters.DOMString(url, prefix, "Argument 1"); if (base !== undefined) { - base = webidl.converters.DOMString(base, { - prefix, - context: "Argument 2", - }); + base = webidl.converters.DOMString(base, prefix, "Argument 2"); } const status = opUrlParse(url, base); return status === 0 || status === 1; @@ -467,10 +438,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'hash' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -495,10 +463,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'host' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -523,10 +488,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'hostname' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -550,10 +512,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'href' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = 
webidl.converters.DOMString(value, prefix, "Argument 1"); const status = opUrlParse(value); this.#serialization = getSerialization(status, value); this.#updateComponents(); @@ -606,10 +565,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'password' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -639,10 +595,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'pathname' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -674,10 +627,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'port' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -702,10 +652,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'protocol' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -733,10 +680,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'search' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { 
- prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -773,10 +717,7 @@ class URL { webidl.assertBranded(this, URLPrototype); const prefix = "Failed to set 'username' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 1", - }); + value = webidl.converters.DOMString(value, prefix, "Argument 1"); try { this.#serialization = opUrlReparse( this.#serialization, @@ -827,15 +768,25 @@ function parseUrlEncoded(bytes) { webidl .converters[ "sequence> or record or USVString" - ] = (V, opts) => { + ] = (V, prefix, context, opts) => { // Union for (sequence> or record or USVString) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence>"](V, opts); + return webidl.converters["sequence>"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters["record"](V, opts); + return webidl.converters["record"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters.USVString(V, opts); + return webidl.converters.USVString(V, prefix, context, opts); }; export { diff --git a/ext/url/01_urlpattern.js b/ext/url/01_urlpattern.js index 1ed02f8d38..3c08bc1b89 100644 --- a/ext/url/01_urlpattern.js +++ b/ext/url/01_urlpattern.js @@ -56,15 +56,9 @@ class URLPattern { this[webidl.brand] = webidl.brand; const prefix = "Failed to construct 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const components = 
ops.op_urlpattern_parse(input, baseURL); @@ -134,15 +128,9 @@ class URLPattern { webidl.assertBranded(this, URLPatternPrototype); const prefix = "Failed to execute 'test' on 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const res = ops.op_urlpattern_process_match_input( @@ -175,15 +163,9 @@ class URLPattern { webidl.assertBranded(this, URLPatternPrototype); const prefix = "Failed to execute 'exec' on 'URLPattern'"; webidl.requiredArguments(arguments.length, 1, prefix); - input = webidl.converters.URLPatternInput(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.URLPatternInput(input, prefix, "Argument 1"); if (baseURL !== undefined) { - baseURL = webidl.converters.USVString(baseURL, { - prefix, - context: "Argument 2", - }); + baseURL = webidl.converters.USVString(baseURL, prefix, "Argument 2"); } const res = ops.op_urlpattern_process_match_input( @@ -258,12 +240,12 @@ webidl.converters.URLPatternInit = webidl { key: "baseURL", converter: webidl.converters.USVString }, ]); -webidl.converters["URLPatternInput"] = (V, opts) => { +webidl.converters["URLPatternInput"] = (V, prefix, context, opts) => { // Union for (URLPatternInit or USVString) if (typeof V == "object") { - return webidl.converters.URLPatternInit(V, opts); + return webidl.converters.URLPatternInit(V, prefix, context, opts); } - return webidl.converters.USVString(V, opts); + return webidl.converters.USVString(V, prefix, context, opts); }; export { URLPattern }; diff --git a/ext/web/01_dom_exception.js b/ext/web/01_dom_exception.js index c465a06fdc..31d2cdc29f 100644 --- 
a/ext/web/01_dom_exception.js +++ b/ext/web/01_dom_exception.js @@ -94,14 +94,16 @@ class DOMException { // https://webidl.spec.whatwg.org/#dom-domexception-domexception constructor(message = "", name = "Error") { - message = webidl.converters.DOMString(message, { - prefix: "Failed to construct 'DOMException'", - context: "Argument 1", - }); - name = webidl.converters.DOMString(name, { - prefix: "Failed to construct 'DOMException'", - context: "Argument 2", - }); + message = webidl.converters.DOMString( + message, + "Failed to construct 'DOMException'", + "Argument 1", + ); + name = webidl.converters.DOMString( + name, + "Failed to construct 'DOMException'", + "Argument 2", + ); const code = nameToCodeMapping[name] ?? 0; this[_message] = message; diff --git a/ext/web/02_event.js b/ext/web/02_event.js index 0372293cff..e7553ea89b 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -122,7 +122,7 @@ const isTrusted = ObjectGetOwnPropertyDescriptor({ }, }, "isTrusted").get; -const eventInitConverter = webidl.createDictionaryConverter("EventInit", [{ +webidl.converters.EventInit = webidl.createDictionaryConverter("EventInit", [{ key: "bubbles", defaultValue: false, converter: webidl.converters.boolean, @@ -167,14 +167,16 @@ class Event { 1, "Failed to construct 'Event'", ); - type = webidl.converters.DOMString(type, { - prefix: "Failed to construct 'Event'", - context: "Argument 1", - }); - const eventInit = eventInitConverter(eventInitDict, { - prefix: "Failed to construct 'Event'", - context: "Argument 2", - }); + type = webidl.converters.DOMString( + type, + "Failed to construct 'Event'", + "Argument 1", + ); + const eventInit = webidl.converters.EventInit( + eventInitDict, + "Failed to construct 'Event'", + "Argument 2", + ); this[_attributes] = { type, ...eventInit, @@ -947,13 +949,13 @@ function lazyAddEventListenerOptionsConverter() { ); } -webidl.converters.AddEventListenerOptions = (V, opts) => { +webidl.converters.AddEventListenerOptions = (V, 
prefix, context, opts) => { if (webidl.type(V) !== "Object" || V === null) { V = { capture: Boolean(V) }; } lazyAddEventListenerOptionsConverter(); - return addEventListenerOptionsConverter(V, opts); + return addEventListenerOptionsConverter(V, prefix, context, opts); }; class EventTarget { @@ -973,10 +975,11 @@ class EventTarget { webidl.requiredArguments(arguments.length, 2, prefix); - options = webidl.converters.AddEventListenerOptions(options, { + options = webidl.converters.AddEventListenerOptions( + options, prefix, - context: "Argument 3", - }); + "Argument 3", + ); if (callback === null) { return; diff --git a/ext/web/03_abort_signal.js b/ext/web/03_abort_signal.js index 2122d642ef..8857eb5504 100644 --- a/ext/web/03_abort_signal.js +++ b/ext/web/03_abort_signal.js @@ -46,9 +46,14 @@ class AbortSignal extends EventTarget { static timeout(millis) { const prefix = "Failed to call 'AbortSignal.timeout'"; webidl.requiredArguments(arguments.length, 1, prefix); - millis = webidl.converters["unsigned long long"](millis, { - enforceRange: true, - }); + millis = webidl.converters["unsigned long long"]( + millis, + prefix, + "Argument 1", + { + enforceRange: true, + }, + ); const signal = new AbortSignal(illegalConstructorKey); signal[timerId] = setTimeout( diff --git a/ext/web/05_base64.js b/ext/web/05_base64.js index 05dd1431c2..3971b2e585 100644 --- a/ext/web/05_base64.js +++ b/ext/web/05_base64.js @@ -23,10 +23,7 @@ const { function atob(data) { const prefix = "Failed to execute 'atob'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.DOMString(data, { - prefix, - context: "Argument 1", - }); + data = webidl.converters.DOMString(data, prefix, "Argument 1"); try { return ops.op_base64_atob(data); } catch (e) { @@ -47,10 +44,7 @@ function atob(data) { function btoa(data) { const prefix = "Failed to execute 'btoa'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.DOMString(data, { - prefix, - 
context: "Argument 1", - }); + data = webidl.converters.DOMString(data, prefix, "Argument 1"); try { return ops.op_base64_btoa(data); } catch (e) { diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index c8a7b9c47e..6d390308d4 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -4666,10 +4666,7 @@ class ByteLengthQueuingStrategy { constructor(init) { const prefix = "Failed to construct 'ByteLengthQueuingStrategy'"; webidl.requiredArguments(arguments.length, 1, prefix); - init = webidl.converters.QueuingStrategyInit(init, { - prefix, - context: "Argument 1", - }); + init = webidl.converters.QueuingStrategyInit(init, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; this[_globalObject] = globalThis; this[_highWaterMark] = init.highWaterMark; @@ -4723,10 +4720,7 @@ class CountQueuingStrategy { constructor(init) { const prefix = "Failed to construct 'CountQueuingStrategy'"; webidl.requiredArguments(arguments.length, 1, prefix); - init = webidl.converters.QueuingStrategyInit(init, { - prefix, - context: "Argument 1", - }); + init = webidl.converters.QueuingStrategyInit(init, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; this[_globalObject] = globalThis; this[_highWaterMark] = init.highWaterMark; @@ -4803,18 +4797,20 @@ class ReadableStream { constructor(underlyingSource = undefined, strategy = undefined) { const prefix = "Failed to construct 'ReadableStream'"; if (underlyingSource !== undefined) { - underlyingSource = webidl.converters.object(underlyingSource, { + underlyingSource = webidl.converters.object( + underlyingSource, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } else { underlyingSource = null; } if (strategy !== undefined) { - strategy = webidl.converters.QueuingStrategy(strategy, { + strategy = webidl.converters.QueuingStrategy( + strategy, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } else { strategy = {}; } @@ -4823,7 +4819,8 @@ class ReadableStream { if (underlyingSource !== 
undefined) { underlyingSourceDict = webidl.converters.UnderlyingSource( underlyingSource, - { prefix, context: "underlyingSource" }, + prefix, + "underlyingSource", ); } initializeReadableStream(this); @@ -4890,10 +4887,11 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'getReader' on 'ReadableStream'"; if (options !== undefined) { - options = webidl.converters.ReadableStreamGetReaderOptions(options, { + options = webidl.converters.ReadableStreamGetReaderOptions( + options, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } else { options = {}; } @@ -4915,14 +4913,16 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'pipeThrough' on 'ReadableStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - transform = webidl.converters.ReadableWritablePair(transform, { + transform = webidl.converters.ReadableWritablePair( + transform, prefix, - context: "Argument 1", - }); - options = webidl.converters.StreamPipeOptions(options, { + "Argument 1", + ); + options = webidl.converters.StreamPipeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const { readable, writable } = transform; const { preventClose, preventAbort, preventCancel, signal } = options; if (isReadableStreamLocked(this)) { @@ -4953,14 +4953,16 @@ class ReadableStream { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'pipeTo' on 'ReadableStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - destination = webidl.converters.WritableStream(destination, { + destination = webidl.converters.WritableStream( + destination, prefix, - context: "Argument 1", - }); - options = webidl.converters.StreamPipeOptions(options, { + "Argument 1", + ); + options = webidl.converters.StreamPipeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); } catch (err) { return 
PromiseReject(err); } @@ -4999,10 +5001,11 @@ class ReadableStream { values(options = {}) { webidl.assertBranded(this, ReadableStreamPrototype); const prefix = "Failed to execute 'values' on 'ReadableStream'"; - options = webidl.converters.ReadableStreamIteratorOptions(options, { + options = webidl.converters.ReadableStreamIteratorOptions( + options, prefix, - context: "Argument 1", - }); + "Argument 1", + ); /** @type {AsyncIterableIterator} */ const iterator = ObjectCreate(readableStreamAsyncIteratorPrototype); const reader = acquireReadableStreamDefaultReader(this); @@ -5044,10 +5047,7 @@ class ReadableStreamDefaultReader { constructor(stream) { const prefix = "Failed to construct 'ReadableStreamDefaultReader'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.ReadableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.ReadableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpReadableStreamDefaultReader(this, stream); } @@ -5144,10 +5144,7 @@ class ReadableStreamBYOBReader { constructor(stream) { const prefix = "Failed to construct 'ReadableStreamBYOBReader'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.ReadableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.ReadableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpReadableStreamBYOBReader(this, stream); } @@ -5160,10 +5157,7 @@ class ReadableStreamBYOBReader { try { webidl.assertBranded(this, ReadableStreamBYOBReaderPrototype); const prefix = "Failed to execute 'read' on 'ReadableStreamBYOBReader'"; - view = webidl.converters.ArrayBufferView(view, { - prefix, - context: "Argument 1", - }); + view = webidl.converters.ArrayBufferView(view, prefix, "Argument 1"); } catch (err) { return PromiseReject(err); } @@ -5286,11 +5280,14 @@ class ReadableStreamBYOBRequest { webidl.assertBranded(this, 
ReadableStreamBYOBRequestPrototype); const prefix = "Failed to execute 'respond' on 'ReadableStreamBYOBRequest'"; webidl.requiredArguments(arguments.length, 1, prefix); - bytesWritten = webidl.converters["unsigned long long"](bytesWritten, { - enforceRange: true, + bytesWritten = webidl.converters["unsigned long long"]( + bytesWritten, prefix, - context: "Argument 1", - }); + "Argument 1", + { + enforceRange: true, + }, + ); if (this[_controller] === undefined) { throw new TypeError("This BYOB request has been invalidated"); @@ -5319,10 +5316,7 @@ class ReadableStreamBYOBRequest { const prefix = "Failed to execute 'respondWithNewView' on 'ReadableStreamBYOBRequest'"; webidl.requiredArguments(arguments.length, 1, prefix); - view = webidl.converters.ArrayBufferView(view, { - prefix, - context: "Argument 1", - }); + view = webidl.converters.ArrayBufferView(view, prefix, "Argument 1"); if (this[_controller] === undefined) { throw new TypeError("This BYOB request has been invalidated"); @@ -5414,10 +5408,7 @@ class ReadableByteStreamController { "Failed to execute 'enqueue' on 'ReadableByteStreamController'"; webidl.requiredArguments(arguments.length, 1, prefix); const arg1 = "Argument 1"; - chunk = webidl.converters.ArrayBufferView(chunk, { - prefix, - context: arg1, - }); + chunk = webidl.converters.ArrayBufferView(chunk, prefix, arg1); let buffer, byteLength; if (TypedArrayPrototypeGetSymbolToStringTag(chunk) === undefined) { buffer = DataViewPrototypeGetBuffer(/** @type {DataView} */ (chunk)); @@ -5700,27 +5691,27 @@ class TransformStream { ) { const prefix = "Failed to construct 'TransformStream'"; if (transformer !== undefined) { - transformer = webidl.converters.object(transformer, { - prefix, - context: "Argument 1", - }); + transformer = webidl.converters.object(transformer, prefix, "Argument 1"); } - writableStrategy = webidl.converters.QueuingStrategy(writableStrategy, { + writableStrategy = webidl.converters.QueuingStrategy( + writableStrategy, prefix, - 
context: "Argument 2", - }); - readableStrategy = webidl.converters.QueuingStrategy(readableStrategy, { + "Argument 2", + ); + readableStrategy = webidl.converters.QueuingStrategy( + readableStrategy, prefix, - context: "Argument 2", - }); + "Argument 3", + ); this[webidl.brand] = webidl.brand; if (transformer === undefined) { transformer = null; } - const transformerDict = webidl.converters.Transformer(transformer, { + const transformerDict = webidl.converters.Transformer( + transformer, prefix, - context: "transformer", - }); + "transformer", + ); if (transformerDict.readableType !== undefined) { throw new RangeError( `${prefix}: readableType transformers not supported.`, @@ -5887,22 +5878,25 @@ class WritableStream { constructor(underlyingSink = undefined, strategy = {}) { const prefix = "Failed to construct 'WritableStream'"; if (underlyingSink !== undefined) { - underlyingSink = webidl.converters.object(underlyingSink, { + underlyingSink = webidl.converters.object( + underlyingSink, prefix, - context: "Argument 1", - }); + "Argument 1", + ); } - strategy = webidl.converters.QueuingStrategy(strategy, { + strategy = webidl.converters.QueuingStrategy( + strategy, prefix, - context: "Argument 2", - }); + "Argument 2", + ); this[webidl.brand] = webidl.brand; if (underlyingSink === undefined) { underlyingSink = null; } const underlyingSinkDict = webidl.converters.UnderlyingSink( underlyingSink, - { prefix, context: "underlyingSink" }, + prefix, + "underlyingSink", ); if (underlyingSinkDict.type != null) { throw new RangeError( @@ -6003,10 +5997,7 @@ class WritableStreamDefaultWriter { constructor(stream) { const prefix = "Failed to construct 'WritableStreamDefaultWriter'"; webidl.requiredArguments(arguments.length, 1, prefix); - stream = webidl.converters.WritableStream(stream, { - prefix, - context: "Argument 1", - }); + stream = webidl.converters.WritableStream(stream, prefix, "Argument 1"); this[webidl.brand] = webidl.brand; setUpWritableStreamDefaultWriter(this, 
stream); } @@ -6251,8 +6242,8 @@ webidl.converters.UnderlyingSource = webidl }, { key: "autoAllocateChunkSize", - converter: (V, opts) => - webidl.converters["unsigned long long"](V, { + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long long"](V, prefix, context, { ...opts, enforceRange: true, }), diff --git a/ext/web/08_text_encoding.js b/ext/web/08_text_encoding.js index 2e19c3d1ff..e7e535f906 100644 --- a/ext/web/08_text_encoding.js +++ b/ext/web/08_text_encoding.js @@ -53,14 +53,12 @@ class TextDecoder { */ constructor(label = "utf-8", options = {}) { const prefix = "Failed to construct 'TextDecoder'"; - label = webidl.converters.DOMString(label, { + label = webidl.converters.DOMString(label, prefix, "Argument 1"); + options = webidl.converters.TextDecoderOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.TextDecoderOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); const encoding = ops.op_encoding_normalize_label(label); this.#encoding = encoding; this.#fatal = options.fatal; @@ -95,18 +93,17 @@ class TextDecoder { webidl.assertBranded(this, TextDecoderPrototype); const prefix = "Failed to execute 'decode' on 'TextDecoder'"; if (input !== undefined) { - input = webidl.converters.BufferSource(input, { - prefix, - context: "Argument 1", + input = webidl.converters.BufferSource(input, prefix, "Argument 1", { allowShared: true, }); } let stream = false; if (options !== undefined) { - options = webidl.converters.TextDecodeOptions(options, { + options = webidl.converters.TextDecodeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); stream = options.stream; } @@ -215,13 +212,13 @@ class TextEncoder { */ encode(input = "") { webidl.assertBranded(this, TextEncoderPrototype); - const prefix = "Failed to execute 'encode' on 'TextEncoder'"; // The WebIDL type of `input` is `USVString`, but `core.encode` already // converts lone surrogates to the 
replacement character. - input = webidl.converters.DOMString(input, { - prefix, - context: "Argument 1", - }); + input = webidl.converters.DOMString( + input, + "Failed to execute 'encode' on 'TextEncoder'", + "Argument 1", + ); return core.encode(input); } @@ -235,15 +232,15 @@ class TextEncoder { const prefix = "Failed to execute 'encodeInto' on 'TextEncoder'"; // The WebIDL type of `source` is `USVString`, but the ops bindings // already convert lone surrogates to the replacement character. - source = webidl.converters.DOMString(source, { + source = webidl.converters.DOMString(source, prefix, "Argument 1"); + destination = webidl.converters.Uint8Array( + destination, prefix, - context: "Argument 1", - }); - destination = webidl.converters.Uint8Array(destination, { - prefix, - context: "Argument 2", - allowShared: true, - }); + "Argument 2", + { + allowShared: true, + }, + ); ops.op_encoding_encode_into(source, destination, encodeIntoBuf); return { read: encodeIntoBuf[0], @@ -269,21 +266,19 @@ class TextDecoderStream { */ constructor(label = "utf-8", options = {}) { const prefix = "Failed to construct 'TextDecoderStream'"; - label = webidl.converters.DOMString(label, { + label = webidl.converters.DOMString(label, prefix, "Argument 1"); + options = webidl.converters.TextDecoderOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.TextDecoderOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); this.#decoder = new TextDecoder(label, options); this.#transform = new TransformStream({ // The transform and flush functions need access to TextDecoderStream's // `this`, so they are defined as functions rather than methods. 
transform: (chunk, controller) => { try { - chunk = webidl.converters.BufferSource(chunk, { + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk", { allowShared: true, }); const decoded = this.#decoder.decode(chunk, { stream: true }); diff --git a/ext/web/09_file.js b/ext/web/09_file.js index 8f0072e05b..a81176b385 100644 --- a/ext/web/09_file.js +++ b/ext/web/09_file.js @@ -218,14 +218,16 @@ class Blob { */ constructor(blobParts = [], options = {}) { const prefix = "Failed to construct 'Blob'"; - blobParts = webidl.converters["sequence"](blobParts, { - context: "Argument 1", + blobParts = webidl.converters["sequence"]( + blobParts, prefix, - }); - options = webidl.converters["BlobPropertyBag"](options, { - context: "Argument 2", + "Argument 1", + ); + options = webidl.converters["BlobPropertyBag"]( + options, prefix, - }); + "Argument 2", + ); this[webidl.brand] = webidl.brand; @@ -261,24 +263,21 @@ class Blob { webidl.assertBranded(this, BlobPrototype); const prefix = "Failed to execute 'slice' on 'Blob'"; if (start !== undefined) { - start = webidl.converters["long long"](start, { + start = webidl.converters["long long"](start, prefix, "Argument 1", { clamp: true, - context: "Argument 1", - prefix, }); } if (end !== undefined) { - end = webidl.converters["long long"](end, { + end = webidl.converters["long long"](end, prefix, "Argument 2", { clamp: true, - context: "Argument 2", - prefix, }); } if (contentType !== undefined) { - contentType = webidl.converters["DOMString"](contentType, { - context: "Argument 3", + contentType = webidl.converters["DOMString"]( + contentType, prefix, - }); + "Argument 3", + ); } // deno-lint-ignore no-this-alias @@ -430,27 +429,27 @@ webidl.converters["Blob"] = webidl.createInterfaceConverter( "Blob", Blob.prototype, ); -webidl.converters["BlobPart"] = (V, opts) => { +webidl.converters["BlobPart"] = (V, prefix, context, opts) => { // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString) if (typeof V == 
"object") { if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, opts); + return webidl.converters["Blob"](V, prefix, context, opts); } if ( ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) || // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } // BlobPart is passed to processBlobParts after conversion, which calls core.encode() // on the string. // core.encode() is equivalent to USVString normalization. - return webidl.converters["DOMString"](V, opts); + return webidl.converters["DOMString"](V, prefix, context, opts); }; webidl.converters["sequence"] = webidl.createSequenceConverter( webidl.converters["BlobPart"], @@ -494,18 +493,17 @@ class File extends Blob { const prefix = "Failed to construct 'File'"; webidl.requiredArguments(arguments.length, 2, prefix); - fileBits = webidl.converters["sequence"](fileBits, { - context: "Argument 1", + fileBits = webidl.converters["sequence"]( + fileBits, prefix, - }); - fileName = webidl.converters["USVString"](fileName, { - context: "Argument 2", + "Argument 1", + ); + fileName = webidl.converters["USVString"](fileName, prefix, "Argument 2"); + options = webidl.converters["FilePropertyBag"]( + options, prefix, - }); - options = webidl.converters["FilePropertyBag"](options, { - context: "Argument 3", - prefix, - }); + "Argument 3", + ); super(fileBits, options); diff --git a/ext/web/10_filereader.js b/ext/web/10_filereader.js index 897ac7e937..fe5dbb9150 100644 --- a/ext/web/10_filereader.js +++ b/ext/web/10_filereader.js @@ -383,10 +383,7 @@ class FileReader extends EventTarget { const prefix = "Failed to execute 'readAsText' on 'FileReader'"; 
webidl.requiredArguments(arguments.length, 1, prefix); if (encoding !== undefined) { - encoding = webidl.converters["DOMString"](encoding, { - prefix, - context: "Argument 2", - }); + encoding = webidl.converters["DOMString"](encoding, prefix, "Argument 2"); } // alias for readAsArrayBuffer this.#readOperation(blob, { kind: "Text", encoding }); diff --git a/ext/web/11_blob_url.js b/ext/web/11_blob_url.js index 7c4adf4e99..3ac240d90a 100644 --- a/ext/web/11_blob_url.js +++ b/ext/web/11_blob_url.js @@ -24,10 +24,7 @@ import { URL } from "ext:deno_url/00_url.js"; function createObjectURL(blob) { const prefix = "Failed to execute 'createObjectURL' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - blob = webidl.converters["Blob"](blob, { - context: "Argument 1", - prefix, - }); + blob = webidl.converters["Blob"](blob, prefix, "Argument 1"); return ops.op_blob_create_object_url(blob.type, getParts(blob)); } @@ -39,10 +36,7 @@ function createObjectURL(blob) { function revokeObjectURL(url) { const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters["DOMString"](url, { - context: "Argument 1", - prefix, - }); + url = webidl.converters["DOMString"](url, prefix, "Argument 1"); ops.op_blob_revoke_object_url(url); } diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js index 39820a52f7..fdc678a4f9 100644 --- a/ext/web/13_message_port.js +++ b/ext/web/13_message_port.js @@ -110,16 +110,15 @@ class MessagePort extends EventTarget { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; @@ -330,10 +329,11 @@ webidl.converters.StructuredSerializeOptions = webidl function 
structuredClone(value, options) { const prefix = "Failed to execute 'structuredClone'"; webidl.requiredArguments(arguments.length, 1, prefix); - options = webidl.converters.StructuredSerializeOptions(options, { + options = webidl.converters.StructuredSerializeOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const messageData = serializeJsMessageData(value, options.transfer); return deserializeJsMessageData(messageData)[0]; } diff --git a/ext/web/14_compression.js b/ext/web/14_compression.js index 1731b3bf3b..2ba7746bd5 100644 --- a/ext/web/14_compression.js +++ b/ext/web/14_compression.js @@ -29,19 +29,13 @@ class CompressionStream { constructor(format) { const prefix = "Failed to construct 'CompressionStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - format = webidl.converters.CompressionFormat(format, { - prefix, - context: "Argument 1", - }); + format = webidl.converters.CompressionFormat(format, prefix, "Argument 1"); const rid = ops.op_compression_new(format, false); this.#transform = new TransformStream({ transform(chunk, controller) { - chunk = webidl.converters.BufferSource(chunk, { - prefix, - context: "chunk", - }); + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk"); const output = ops.op_compression_write( rid, chunk, @@ -77,19 +71,13 @@ class DecompressionStream { constructor(format) { const prefix = "Failed to construct 'DecompressionStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - format = webidl.converters.CompressionFormat(format, { - prefix, - context: "Argument 1", - }); + format = webidl.converters.CompressionFormat(format, prefix, "Argument 1"); const rid = ops.op_compression_new(format, true); this.#transform = new TransformStream({ transform(chunk, controller) { - chunk = webidl.converters.BufferSource(chunk, { - prefix, - context: "chunk", - }); + chunk = webidl.converters.BufferSource(chunk, prefix, "chunk"); const output = ops.op_compression_write( rid, chunk, 
diff --git a/ext/web/15_performance.js b/ext/web/15_performance.js index d494a5328b..72f4d3a7e5 100644 --- a/ext/web/15_performance.js +++ b/ext/web/15_performance.js @@ -41,11 +41,16 @@ webidl.converters["PerformanceMarkOptions"] = webidl ], ); -webidl.converters["DOMString or DOMHighResTimeStamp"] = (V, opts) => { +webidl.converters["DOMString or DOMHighResTimeStamp"] = ( + V, + prefix, + context, + opts, +) => { if (webidl.type(V) === "Number" && V !== null) { - return webidl.converters.DOMHighResTimeStamp(V, opts); + return webidl.converters.DOMHighResTimeStamp(V, prefix, context, opts); } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; webidl.converters["PerformanceMeasureOptions"] = webidl @@ -71,11 +76,21 @@ webidl.converters["PerformanceMeasureOptions"] = webidl ], ); -webidl.converters["DOMString or PerformanceMeasureOptions"] = (V, opts) => { +webidl.converters["DOMString or PerformanceMeasureOptions"] = ( + V, + prefix, + context, + opts, +) => { if (webidl.type(V) === "Object" && V !== null) { - return webidl.converters["PerformanceMeasureOptions"](V, opts); + return webidl.converters["PerformanceMeasureOptions"]( + V, + prefix, + context, + opts, + ); } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; function setTimeOrigin(origin) { @@ -221,15 +236,13 @@ class PerformanceMark extends PerformanceEntry { const prefix = "Failed to construct 'PerformanceMark'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.DOMString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.DOMString(name, prefix, "Argument 1"); - options = webidl.converters.PerformanceMarkOptions(options, { + options = webidl.converters.PerformanceMarkOptions( + options, prefix, - context: "Argument 2", - }); + "Argument 2", + ); const { detail = null, startTime = now() } = options; @@ -345,10 +358,11 @@ 
class Performance extends EventTarget { clearMarks(markName = undefined) { webidl.assertBranded(this, PerformancePrototype); if (markName !== undefined) { - markName = webidl.converters.DOMString(markName, { - prefix: "Failed to execute 'clearMarks' on 'Performance'", - context: "Argument 1", - }); + markName = webidl.converters.DOMString( + markName, + "Failed to execute 'clearMarks' on 'Performance'", + "Argument 1", + ); performanceEntries = ArrayPrototypeFilter( performanceEntries, @@ -365,10 +379,11 @@ class Performance extends EventTarget { clearMeasures(measureName = undefined) { webidl.assertBranded(this, PerformancePrototype); if (measureName !== undefined) { - measureName = webidl.converters.DOMString(measureName, { - prefix: "Failed to execute 'clearMeasures' on 'Performance'", - context: "Argument 1", - }); + measureName = webidl.converters.DOMString( + measureName, + "Failed to execute 'clearMeasures' on 'Performance'", + "Argument 1", + ); performanceEntries = ArrayPrototypeFilter( performanceEntries, @@ -396,16 +411,10 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - name = webidl.converters.DOMString(name, { - prefix, - context: "Argument 1", - }); + name = webidl.converters.DOMString(name, prefix, "Argument 1"); if (type !== undefined) { - type = webidl.converters.DOMString(type, { - prefix, - context: "Argument 2", - }); + type = webidl.converters.DOMString(type, prefix, "Argument 2"); } return filterByNameType(name, type); @@ -416,10 +425,7 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'getEntriesByName' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - type = webidl.converters.DOMString(type, { - prefix, - context: "Argument 1", - }); + type = webidl.converters.DOMString(type, prefix, "Argument 1"); return filterByNameType(undefined, type); } @@ -432,15 +438,13 @@ 
class Performance extends EventTarget { const prefix = "Failed to execute 'mark' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - markName = webidl.converters.DOMString(markName, { - prefix, - context: "Argument 1", - }); + markName = webidl.converters.DOMString(markName, prefix, "Argument 1"); - markOptions = webidl.converters.PerformanceMarkOptions(markOptions, { + markOptions = webidl.converters.PerformanceMarkOptions( + markOptions, prefix, - context: "Argument 2", - }); + "Argument 2", + ); // 3.1.1.1 If the global object is a Window object and markName uses the // same name as a read only attribute in the PerformanceTiming interface, @@ -460,22 +464,21 @@ class Performance extends EventTarget { const prefix = "Failed to execute 'measure' on 'Performance'"; webidl.requiredArguments(arguments.length, 1, prefix); - measureName = webidl.converters.DOMString(measureName, { + measureName = webidl.converters.DOMString( + measureName, prefix, - context: "Argument 1", - }); + "Argument 1", + ); startOrMeasureOptions = webidl.converters - ["DOMString or PerformanceMeasureOptions"](startOrMeasureOptions, { + ["DOMString or PerformanceMeasureOptions"]( + startOrMeasureOptions, prefix, - context: "Argument 2", - }); + "Argument 2", + ); if (endMark !== undefined) { - endMark = webidl.converters.DOMString(endMark, { - prefix, - context: "Argument 3", - }); + endMark = webidl.converters.DOMString(endMark, prefix, "Argument 3"); } if ( diff --git a/ext/webidl/00_webidl.js b/ext/webidl/00_webidl.js index 7788741048..71b7982b75 100644 --- a/ext/webidl/00_webidl.js +++ b/ext/webidl/00_webidl.js @@ -191,7 +191,7 @@ function createIntegerConversion(bitLength, typeOpts) { const twoToTheBitLength = MathPow(2, bitLength); const twoToOneLessThanTheBitLength = MathPow(2, bitLength - 1); - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { let x = toNumber(V); x = censorNegativeZero(x); @@ -200,8 +200,8 @@ 
function createIntegerConversion(bitLength, typeOpts) { throw makeException( TypeError, "is not a finite number", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -211,8 +211,8 @@ function createIntegerConversion(bitLength, typeOpts) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts.prefix, - opts.context, + prefix, + context, ); } @@ -250,7 +250,7 @@ function createLongLongConversion(bitLength, { unsigned }) { const lowerBound = unsigned ? 0 : NumberMIN_SAFE_INTEGER; const asBigIntN = unsigned ? BigIntAsUintN : BigIntAsIntN; - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { let x = toNumber(V); x = censorNegativeZero(x); @@ -259,8 +259,8 @@ function createLongLongConversion(bitLength, { unsigned }) { throw makeException( TypeError, "is not a finite number", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -270,8 +270,8 @@ function createLongLongConversion(bitLength, { unsigned }) { throw makeException( TypeError, `is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`, - opts.prefix, - opts.context, + prefix, + context, ); } @@ -320,15 +320,15 @@ converters["unsigned long long"] = createLongLongConversion(64, { unsigned: true, }); -converters.float = (V, opts) => { +converters.float = (V, prefix, context, _opts) => { const x = toNumber(V); if (!NumberIsFinite(x)) { throw makeException( TypeError, "is not a finite floating-point value", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -342,15 +342,15 @@ converters.float = (V, opts) => { throw makeException( TypeError, "is outside the range of a single-precision floating-point value", - opts.prefix, - opts.context, + prefix, + context, ); } return y; }; -converters["unrestricted float"] = (V, _opts) => { +converters["unrestricted float"] = (V, _prefix, _context, _opts) => { const x = toNumber(V); if (isNaN(x)) { @@ -364,28 +364,28 @@ 
converters["unrestricted float"] = (V, _opts) => { return MathFround(x); }; -converters.double = (V, opts) => { +converters.double = (V, prefix, context, _opts) => { const x = toNumber(V); if (!NumberIsFinite(x)) { throw makeException( TypeError, "is not a finite floating-point value", - opts.prefix, - opts.context, + prefix, + context, ); } return x; }; -converters["unrestricted double"] = (V, _opts) => { +converters["unrestricted double"] = (V, _prefix, _context, _opts) => { const x = toNumber(V); return x; }; -converters.DOMString = function (V, opts = {}) { +converters.DOMString = function (V, prefix, context, opts = {}) { if (typeof V === "string") { return V; } else if (V === null && opts.treatNullAsEmptyString) { @@ -394,8 +394,8 @@ converters.DOMString = function (V, opts = {}) { throw makeException( TypeError, "is a symbol, which cannot be converted to a string", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -404,21 +404,21 @@ converters.DOMString = function (V, opts = {}) { // deno-lint-ignore no-control-regex const IS_BYTE_STRING = new SafeRegExp(/^[\x00-\xFF]*$/); -converters.ByteString = (V, opts) => { - const x = converters.DOMString(V, opts); +converters.ByteString = (V, prefix, context, opts) => { + const x = converters.DOMString(V, prefix, context, opts); if (!RegExpPrototypeTest(IS_BYTE_STRING, x)) { throw makeException( TypeError, "is not a valid ByteString", - opts.prefix, - opts.context, + prefix, + context, ); } return x; }; -converters.USVString = (V, opts) => { - const S = converters.DOMString(V, opts); +converters.USVString = (V, prefix, context, opts) => { + const S = converters.DOMString(V, prefix, context, opts); const n = S.length; let U = ""; for (let i = 0; i < n; ++i) { @@ -444,13 +444,13 @@ converters.USVString = (V, opts) => { return U; }; -converters.object = (V, opts) => { +converters.object = (V, prefix, context, _opts) => { if (type(V) !== "Object") { throw makeException( TypeError, "is not an object", - 
opts.prefix, - opts.context, + prefix, + context, ); } @@ -461,13 +461,13 @@ converters.object = (V, opts) => { // Neither Function nor VoidFunction is defined with [TreatNonObjectAsNull], so // handling for that is omitted. -function convertCallbackFunction(V, opts) { +function convertCallbackFunction(V, prefix, context, _opts) { if (typeof V !== "function") { throw makeException( TypeError, "is not a function", - opts.prefix, - opts.context, + prefix, + context, ); } return V; @@ -487,34 +487,44 @@ function isSharedArrayBuffer(V) { return ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V); } -converters.ArrayBuffer = (V, opts = {}) => { +converters.ArrayBuffer = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!isNonSharedArrayBuffer(V)) { if (opts.allowShared && !isSharedArrayBuffer(V)) { throw makeException( TypeError, "is not an ArrayBuffer or SharedArrayBuffer", - opts.prefix, - opts.context, + prefix, + context, ); } throw makeException( TypeError, "is not an ArrayBuffer", - opts.prefix, - opts.context, + prefix, + context, ); } return V; }; -converters.DataView = (V, opts = {}) => { +converters.DataView = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!isDataView(V)) { throw makeException( TypeError, "is not a DataView", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -522,8 +532,8 @@ converters.DataView = (V, opts = {}) => { throw makeException( TypeError, "is backed by a SharedArrayBuffer, which is not allowed", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -547,13 +557,18 @@ ArrayPrototypeForEach( const article = RegExpPrototypeTest(new SafeRegExp(/^[AEIOU]/), name) ? 
"an" : "a"; - converters[name] = (V, opts = {}) => { + converters[name] = ( + V, + prefix = undefined, + context = undefined, + opts = {}, + ) => { if (TypedArrayPrototypeGetSymbolToStringTag(V) !== name) { throw makeException( TypeError, `is not ${article} ${name} object`, - opts.prefix, - opts.context, + prefix, + context, ); } if ( @@ -563,8 +578,8 @@ ArrayPrototypeForEach( throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -575,13 +590,18 @@ ArrayPrototypeForEach( // Common definitions -converters.ArrayBufferView = (V, opts = {}) => { +converters.ArrayBufferView = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (!ArrayBufferIsView(V)) { throw makeException( TypeError, "is not a view on an ArrayBuffer or SharedArrayBuffer", - opts.prefix, - opts.context, + prefix, + context, ); } let buffer; @@ -594,15 +614,20 @@ converters.ArrayBufferView = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts.prefix, - opts.context, + prefix, + context, ); } return V; }; -converters.BufferSource = (V, opts = {}) => { +converters.BufferSource = ( + V, + prefix = undefined, + context = undefined, + opts = {}, +) => { if (ArrayBufferIsView(V)) { let buffer; if (TypedArrayPrototypeGetSymbolToStringTag(V) !== undefined) { @@ -614,8 +639,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is a view on a SharedArrayBuffer, which is not allowed", - opts.prefix, - opts.context, + prefix, + context, ); } @@ -626,8 +651,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer or a view on one", - opts.prefix, - opts.context, + prefix, + context, ); } if ( @@ -638,8 +663,8 @@ converters.BufferSource = (V, opts = {}) => { throw makeException( TypeError, "is not an ArrayBuffer, SharedArrayBuffer, or a view on one", - 
opts.prefix, - opts.context, + prefix, + context, ); } @@ -744,7 +769,7 @@ function createDictionaryConverter(name, ...dictionaries) { } } - return function (V, opts = {}) { + return function (V, prefix = undefined, context = undefined, opts = {}) { const typeV = type(V); switch (typeV) { case "Undefined": @@ -755,8 +780,8 @@ function createDictionaryConverter(name, ...dictionaries) { throw makeException( TypeError, "can not be converted to a dictionary", - opts.prefix, - opts.context, + prefix, + context, ); } const esDict = V; @@ -780,18 +805,23 @@ function createDictionaryConverter(name, ...dictionaries) { } if (esMemberValue !== undefined) { - const context = `'${key}' of '${name}'${ - opts.context ? ` (${opts.context})` : "" + const memberContext = `'${key}' of '${name}'${ + context ? ` (${context})` : "" }`; const converter = member.converter; - const idlMemberValue = converter(esMemberValue, { ...opts, context }); + const idlMemberValue = converter( + esMemberValue, + prefix, + memberContext, + opts, + ); idlDict[key] = idlMemberValue; } else if (member.required) { throw makeException( TypeError, `can not be converted to '${name}' because '${key}' is required in '${name}'.`, - opts.prefix, - opts.context, + prefix, + context, ); } } @@ -804,13 +834,13 @@ function createDictionaryConverter(name, ...dictionaries) { function createEnumConverter(name, values) { const E = new SafeSet(values); - return function (V, opts = {}) { + return function (V, prefix = undefined, _context = undefined, _opts = {}) { const S = String(V); if (!E.has(S)) { throw new TypeError( `${ - opts.prefix ? opts.prefix + ": " : "" + prefix ? 
prefix + ": " : "" }The provided value '${S}' is not a valid enum value of type ${name}.`, ); } @@ -820,7 +850,7 @@ function createEnumConverter(name, values) { } function createNullableConverter(converter) { - return (V, opts = {}) => { + return (V, prefix = undefined, context = undefined, opts = {}) => { // FIXME: If Type(V) is not Object, and the conversion to an IDL value is // being performed due to V being assigned to an attribute whose type is a // nullable callback function that is annotated with @@ -828,19 +858,19 @@ function createNullableConverter(converter) { // value null. if (V === null || V === undefined) return null; - return converter(V, opts); + return converter(V, prefix, context, opts); }; } // https://heycam.github.io/webidl/#es-sequence function createSequenceConverter(converter) { - return function (V, opts = {}) { + return function (V, prefix = undefined, context = undefined, opts = {}) { if (type(V) !== "Object") { throw makeException( TypeError, "can not be converted to sequence.", - opts.prefix, - opts.context, + prefix, + context, ); } const iter = V?.[SymbolIterator]?.(); @@ -848,8 +878,8 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts.prefix, - opts.context, + prefix, + context, ); } const array = []; @@ -859,15 +889,17 @@ function createSequenceConverter(converter) { throw makeException( TypeError, "can not be converted to sequence.", - opts.prefix, - opts.context, + prefix, + context, ); } if (res.done === true) break; - const val = converter(res.value, { - ...opts, - context: `${opts.context}, index ${array.length}`, - }); + const val = converter( + res.value, + prefix, + `${context}, index ${array.length}`, + opts, + ); ArrayPrototypePush(array, val); } return array; @@ -875,13 +907,13 @@ function createSequenceConverter(converter) { } function createRecordConverter(keyConverter, valueConverter) { - return (V, opts) => { + return (V, prefix, context, opts) 
=> { if (type(V) !== "Object") { throw makeException( TypeError, "can not be converted to dictionary.", - opts.prefix, - opts.context, + prefix, + context, ); } const result = {}; @@ -891,9 +923,9 @@ function createRecordConverter(keyConverter, valueConverter) { if (!ObjectPrototypeHasOwnProperty(V, key)) { continue; } - const typedKey = keyConverter(key, opts); + const typedKey = keyConverter(key, prefix, context, opts); const value = V[key]; - const typedValue = valueConverter(value, opts); + const typedValue = valueConverter(value, prefix, context, opts); result[typedKey] = typedValue; } return result; @@ -904,9 +936,9 @@ function createRecordConverter(keyConverter, valueConverter) { const key = keys[i]; const desc = ObjectGetOwnPropertyDescriptor(V, key); if (desc !== undefined && desc.enumerable === true) { - const typedKey = keyConverter(key, opts); + const typedKey = keyConverter(key, prefix, context, opts); const value = V[key]; - const typedValue = valueConverter(value, opts); + const typedValue = valueConverter(value, prefix, context, opts); result[typedKey] = typedValue; } } @@ -915,8 +947,11 @@ function createRecordConverter(keyConverter, valueConverter) { } function createPromiseConverter(converter) { - return (V, opts) => - PromisePrototypeThen(PromiseResolve(V), (V) => converter(V, opts)); + return (V, prefix, context, opts) => + PromisePrototypeThen( + PromiseResolve(V), + (V) => converter(V, prefix, context, opts), + ); } function invokeCallbackFunction( @@ -929,10 +964,7 @@ function invokeCallbackFunction( ) { try { const rv = ReflectApply(callable, thisArg, args); - return returnValueConverter(rv, { - prefix, - context: "return value", - }); + return returnValueConverter(rv, prefix, "return value"); } catch (err) { if (returnsPromise === true) { return PromiseReject(err); @@ -944,13 +976,13 @@ function invokeCallbackFunction( const brand = Symbol("[[webidl.brand]]"); function createInterfaceConverter(name, prototype) { - return (V, opts) => { + 
return (V, prefix, context, _opts) => { if (!ObjectPrototypeIsPrototypeOf(prototype, V) || V[brand] !== brand) { throw makeException( TypeError, `is not of type ${name}.`, - opts.prefix, - opts.context, + prefix, + context, ); } return V; diff --git a/ext/webidl/internal.d.ts b/ext/webidl/internal.d.ts index 095e5ab91d..cc4422a274 100644 --- a/ext/webidl/internal.d.ts +++ b/ext/webidl/internal.d.ts @@ -5,29 +5,13 @@ /// declare module "ext:deno_webidl/00_webidl.js" { - interface ValueConverterOpts { - /** - * The prefix for error messages created by this converter. - * Examples: - * - `Failed to construct 'Event'` - * - `Failed to execute 'removeEventListener' on 'EventTarget'` - */ - prefix: string; - /** - * The context of this value error messages created by this converter. - * Examples: - * - `Argument 1` - * - `Argument 3` - */ - context: string; - } function makeException( ErrorType: any, message: string, - prefix: string, - context: string, + prefix?: string, + context?: string, ): any; - interface IntConverterOpts extends ValueConverterOpts { + interface IntConverterOpts { /** * Wether to throw if the number is outside of the acceptable values for * this type. @@ -38,13 +22,13 @@ declare module "ext:deno_webidl/00_webidl.js" { */ clamp?: boolean; } - interface StringConverterOpts extends ValueConverterOpts { + interface StringConverterOpts { /** * Wether to treat `null` value as an empty string. */ treatNullAsEmptyString?: boolean; } - interface BufferConverterOpts extends ValueConverterOpts { + interface BufferConverterOpts { /** * Wether to allow `SharedArrayBuffer` (not just `ArrayBuffer`). */ @@ -55,148 +39,322 @@ declare module "ext:deno_webidl/00_webidl.js" { /** * Convert a value into a `boolean` (bool). */ - boolean(v: any, opts?: IntConverterOpts): boolean; + boolean( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): boolean; /** * Convert a value into a `byte` (int8). 
*/ - byte(v: any, opts?: IntConverterOpts): number; + byte( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `octet` (uint8). */ - octet(v: any, opts?: IntConverterOpts): number; + octet( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `short` (int16). */ - short(v: any, opts?: IntConverterOpts): number; + short( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned short` (uint16). */ - ["unsigned short"](v: any, opts?: IntConverterOpts): number; + ["unsigned short"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `long` (int32). */ - long(v: any, opts?: IntConverterOpts): number; + long( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned long` (uint32). */ - ["unsigned long"](v: any, opts?: IntConverterOpts): number; + ["unsigned long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `long long` (int64). * **Note this is truncated to a JS number (53 bit precision).** */ - ["long long"](v: any, opts?: IntConverterOpts): number; + ["long long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `unsigned long long` (uint64). * **Note this is truncated to a JS number (53 bit precision).** */ - ["unsigned long long"](v: any, opts?: IntConverterOpts): number; + ["unsigned long long"]( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `float` (f32). 
*/ - float(v: any, opts?: ValueConverterOpts): number; + float( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `unrestricted float` (f32, infinity, or NaN). */ - ["unrestricted float"](v: any, opts?: ValueConverterOpts): number; + ["unrestricted float"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `double` (f64). */ - double(v: any, opts?: ValueConverterOpts): number; + double( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `unrestricted double` (f64, infinity, or NaN). */ - ["unrestricted double"](v: any, opts?: ValueConverterOpts): number; + ["unrestricted double"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number; /** * Convert a value into a `DOMString` (string). */ - DOMString(v: any, opts?: StringConverterOpts): string; + DOMString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into a `ByteString` (string with only u8 codepoints). */ - ByteString(v: any, opts?: StringConverterOpts): string; + ByteString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into a `USVString` (string with only valid non * surrogate Unicode code points). */ - USVString(v: any, opts?: StringConverterOpts): string; + USVString( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string; /** * Convert a value into an `object` (object). */ - object(v: any, opts?: ValueConverterOpts): object; + object( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): object; /** * Convert a value into an `ArrayBuffer` (ArrayBuffer). 
*/ - ArrayBuffer(v: any, opts?: BufferConverterOpts): ArrayBuffer; + ArrayBuffer( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): ArrayBuffer; /** * Convert a value into a `DataView` (ArrayBuffer). */ - DataView(v: any, opts?: BufferConverterOpts): DataView; + DataView( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): DataView; /** * Convert a value into a `Int8Array` (Int8Array). */ - Int8Array(v: any, opts?: BufferConverterOpts): Int8Array; + Int8Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int8Array; /** * Convert a value into a `Int16Array` (Int16Array). */ - Int16Array(v: any, opts?: BufferConverterOpts): Int16Array; + Int16Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int16Array; /** * Convert a value into a `Int32Array` (Int32Array). */ - Int32Array(v: any, opts?: BufferConverterOpts): Int32Array; + Int32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Int32Array; /** * Convert a value into a `Uint8Array` (Uint8Array). */ - Uint8Array(v: any, opts?: BufferConverterOpts): Uint8Array; + Uint8Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint8Array; /** * Convert a value into a `Uint16Array` (Uint16Array). */ - Uint16Array(v: any, opts?: BufferConverterOpts): Uint16Array; + Uint16Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint16Array; /** * Convert a value into a `Uint32Array` (Uint32Array). */ - Uint32Array(v: any, opts?: BufferConverterOpts): Uint32Array; + Uint32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Uint32Array; /** * Convert a value into a `Uint8ClampedArray` (Uint8ClampedArray). 
*/ Uint8ClampedArray( v: any, + prefix?: string, + context?: string, opts?: BufferConverterOpts, ): Uint8ClampedArray; /** * Convert a value into a `Float32Array` (Float32Array). */ - Float32Array(v: any, opts?: BufferConverterOpts): Float32Array; + Float32Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Float32Array; /** * Convert a value into a `Float64Array` (Float64Array). */ - Float64Array(v: any, opts?: BufferConverterOpts): Float64Array; + Float64Array( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): Float64Array; /** * Convert a value into an `ArrayBufferView` (ArrayBufferView). */ - ArrayBufferView(v: any, opts?: BufferConverterOpts): ArrayBufferView; + ArrayBufferView( + v: any, + prefix?: string, + context?: string, + opts?: BufferConverterOpts, + ): ArrayBufferView; /** * Convert a value into a `BufferSource` (ArrayBuffer or ArrayBufferView). */ BufferSource( v: any, + prefix?: string, + context?: string, opts?: BufferConverterOpts, ): ArrayBuffer | ArrayBufferView; /** * Convert a value into a `DOMTimeStamp` (u64). Alias for unsigned long long */ - DOMTimeStamp(v: any, opts?: IntConverterOpts): number; + DOMTimeStamp( + v: any, + prefix?: string, + context?: string, + opts?: IntConverterOpts, + ): number; /** * Convert a value into a `Function` ((...args: any[]) => any). */ - Function(v: any, opts?: ValueConverterOpts): (...args: any) => any; + Function( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): (...args: any) => any; /** * Convert a value into a `VoidFunction` (() => void). 
*/ - VoidFunction(v: any, opts?: ValueConverterOpts): () => void; - ["UVString?"](v: any, opts?: ValueConverterOpts): string | null; - ["sequence"](v: any, opts?: ValueConverterOpts): number[]; + VoidFunction( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): () => void; + ["UVString?"]( + v: any, + prefix?: string, + context?: string, + opts?: StringConverterOpts, + ): string | null; + ["sequence"]( + v: any, + prefix?: string, + context?: string, + opts?: any, + ): number[]; - [type: string]: (v: any, opts: ValueConverterOpts) => any; + [type: string]: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; }; /** @@ -210,7 +368,12 @@ declare module "ext:deno_webidl/00_webidl.js" { type Dictionary = DictionaryMember[]; interface DictionaryMember { key: string; - converter: (v: any, opts: ValueConverterOpts) => any; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; defaultValue?: any; required?: boolean; } @@ -221,7 +384,12 @@ declare module "ext:deno_webidl/00_webidl.js" { function createDictionaryConverter( name: string, ...dictionaries: Dictionary[] - ): (v: any, opts: ValueConverterOpts) => T; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T; /** * Create a converter for enums. @@ -229,28 +397,63 @@ declare module "ext:deno_webidl/00_webidl.js" { function createEnumConverter( name: string, values: string[], - ): (v: any, opts: ValueConverterOpts) => string; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => string; /** * Create a converter that makes the contained type nullable. 
*/ function createNullableConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => T | null; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T | null; /** * Create a converter that converts a sequence of the inner type. */ function createSequenceConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => T[]; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T[]; /** * Create a converter that converts a Promise of the inner type. */ function createPromiseConverter( - converter: (v: any, opts: ValueConverterOpts) => T, - ): (v: any, opts: ValueConverterOpts) => Promise; + converter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => Promise; /** * Invoke a callback function. 
@@ -259,7 +462,12 @@ declare module "ext:deno_webidl/00_webidl.js" { callable: (...args: any) => any, args: any[], thisArg: any, - returnValueConverter: (v: any, opts: ValueConverterOpts) => T, + returnValueConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => T, prefix: string, returnsPromise?: boolean, ): T; @@ -290,17 +498,34 @@ declare module "ext:deno_webidl/00_webidl.js" { function createInterfaceConverter( name: string, prototype: any, - ): (v: any, opts: ValueConverterOpts) => any; + ): ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => any; function createRecordConverter< K extends string | number | symbol, V, >( - keyConverter: (v: any, opts: ValueConverterOpts) => K, - valueConverter: (v: any, opts: ValueConverterOpts) => V, + keyConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => K, + valueConverter: ( + v: any, + prefix?: string, + context?: string, + opts?: any, + ) => V, ): ( v: Record, - opts: ValueConverterOpts, + prefix?: string, + context?: string, + opts?: any, ) => any; /** diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 7d46fdf2a0..dab34a0236 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -52,20 +52,25 @@ const { TypedArrayPrototypeGetSymbolToStringTag, } = primordials; -webidl.converters["sequence or DOMString"] = (V, opts) => { +webidl.converters["sequence or DOMString"] = ( + V, + prefix, + context, + opts, +) => { // Union for (sequence or DOMString) if (webidl.type(V) === "Object" && V !== null) { if (V[SymbolIterator] !== undefined) { - return webidl.converters["sequence"](V, opts); + return webidl.converters["sequence"](V, prefix, context, opts); } } - return webidl.converters.DOMString(V, opts); + return webidl.converters.DOMString(V, prefix, context, opts); }; -webidl.converters["WebSocketSend"] = (V, opts) => { +webidl.converters["WebSocketSend"] = (V, prefix, context, opts) => { // 
Union for (Blob or ArrayBufferView or ArrayBuffer or USVString) if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) { - return webidl.converters["Blob"](V, opts); + return webidl.converters["Blob"](V, prefix, context, opts); } if (typeof V === "object") { if ( @@ -73,13 +78,13 @@ webidl.converters["WebSocketSend"] = (V, opts) => { // deno-lint-ignore prefer-primordials ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V) ) { - return webidl.converters["ArrayBuffer"](V, opts); + return webidl.converters["ArrayBuffer"](V, prefix, context, opts); } if (ArrayBufferIsView(V)) { - return webidl.converters["ArrayBufferView"](V, opts); + return webidl.converters["ArrayBufferView"](V, prefix, context, opts); } } - return webidl.converters["USVString"](V, opts); + return webidl.converters["USVString"](V, prefix, context, opts); }; /** role */ @@ -158,9 +163,10 @@ class WebSocket extends EventTarget { } set binaryType(value) { webidl.assertBranded(this, WebSocketPrototype); - value = webidl.converters.DOMString(value, { - prefix: "Failed to set 'binaryType' on 'WebSocket'", - }); + value = webidl.converters.DOMString( + value, + "Failed to set 'binaryType' on 'WebSocket'", + ); if (value === "blob" || value === "arraybuffer") { this[_binaryType] = value; } @@ -177,16 +183,11 @@ class WebSocket extends EventTarget { this[webidl.brand] = webidl.brand; const prefix = "Failed to construct 'WebSocket'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.USVString(url, { - prefix, - context: "Argument 1", - }); + url = webidl.converters.USVString(url, prefix, "Argument 1"); protocols = webidl.converters["sequence or DOMString"]( protocols, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); let wsURL; @@ -304,10 +305,7 @@ class WebSocket extends EventTarget { const prefix = "Failed to execute 'send' on 'WebSocket'"; webidl.requiredArguments(arguments.length, 1, prefix); - data = webidl.converters.WebSocketSend(data, { - 
prefix, - context: "Argument 1", - }); + data = webidl.converters.WebSocketSend(data, prefix, "Argument 1"); if (this[_readyState] !== OPEN) { throw new DOMException("readyState not OPEN", "InvalidStateError"); @@ -372,18 +370,13 @@ class WebSocket extends EventTarget { const prefix = "Failed to execute 'close' on 'WebSocket'"; if (code !== undefined) { - code = webidl.converters["unsigned short"](code, { - prefix, + code = webidl.converters["unsigned short"](code, prefix, "Argument 1", { clamp: true, - context: "Argument 1", }); } if (reason !== undefined) { - reason = webidl.converters.USVString(reason, { - prefix, - context: "Argument 2", - }); + reason = webidl.converters.USVString(reason, prefix, "Argument 2"); } if (!this[_server]) { diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 8e7100cdb3..fef17b7018 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -88,14 +88,12 @@ class WebSocketStream { this[webidl.brand] = webidl.brand; const prefix = "Failed to construct 'WebSocketStream'"; webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters.USVString(url, { + url = webidl.converters.USVString(url, prefix, "Argument 1"); + options = webidl.converters.WebSocketStreamOptions( + options, prefix, - context: "Argument 1", - }); - options = webidl.converters.WebSocketStreamOptions(options, { - prefix, - context: "Argument 2", - }); + "Argument 2", + ); const wsURL = new URL(url); @@ -366,10 +364,11 @@ class WebSocketStream { close(closeInfo) { webidl.assertBranded(this, WebSocketStreamPrototype); - closeInfo = webidl.converters.WebSocketCloseInfo(closeInfo, { - prefix: "Failed to execute 'close' on 'WebSocketStream'", - context: "Argument 1", - }); + closeInfo = webidl.converters.WebSocketCloseInfo( + closeInfo, + "Failed to execute 'close' on 'WebSocketStream'", + "Argument 1", + ); if ( closeInfo.code && diff --git a/ext/webstorage/01_webstorage.js 
b/ext/webstorage/01_webstorage.js index bc6d173b78..58c68c832d 100644 --- a/ext/webstorage/01_webstorage.js +++ b/ext/webstorage/01_webstorage.js @@ -36,10 +36,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'key' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - index = webidl.converters["unsigned long"](index, { - prefix, - context: "Argument 1", - }); + index = webidl.converters["unsigned long"](index, prefix, "Argument 1"); return ops.op_webstorage_key(index, this[_persistent]); } @@ -48,14 +45,8 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'setItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 2, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); - value = webidl.converters.DOMString(value, { - prefix, - context: "Argument 2", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); + value = webidl.converters.DOMString(value, prefix, "Argument 2"); ops.op_webstorage_set(key, value, this[_persistent]); } @@ -64,10 +55,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'getItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); return ops.op_webstorage_get(key, this[_persistent]); } @@ -76,10 +64,7 @@ class Storage { webidl.assertBranded(this, StoragePrototype); const prefix = "Failed to execute 'removeItem' on 'Storage'"; webidl.requiredArguments(arguments.length, 1, prefix); - key = webidl.converters.DOMString(key, { - prefix, - context: "Argument 1", - }); + key = webidl.converters.DOMString(key, prefix, "Argument 1"); ops.op_webstorage_remove(key, this[_persistent]); } diff --git a/runtime/js/11_workers.js b/runtime/js/11_workers.js index 
f8ed122b81..b08a5737e5 100644 --- a/runtime/js/11_workers.js +++ b/runtime/js/11_workers.js @@ -202,7 +202,7 @@ class Worker extends EventTarget { postMessage(message, transferOrOptions = {}) { const prefix = "Failed to execute 'postMessage' on 'MessagePort'"; - webidl.requiredArguments(arguments.length, 1, { prefix }); + webidl.requiredArguments(arguments.length, 1, prefix); message = webidl.converters.any(message); let options; if ( @@ -212,16 +212,15 @@ class Worker extends EventTarget { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 0ed692f3c9..01cf2973c8 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -101,7 +101,7 @@ function workerClose() { function postMessage(message, transferOrOptions = {}) { const prefix = "Failed to execute 'postMessage' on 'DedicatedWorkerGlobalScope'"; - webidl.requiredArguments(arguments.length, 1, { prefix }); + webidl.requiredArguments(arguments.length, 1, prefix); message = webidl.converters.any(message); let options; if ( @@ -111,16 +111,15 @@ function postMessage(message, transferOrOptions = {}) { ) { const transfer = webidl.converters["sequence"]( transferOrOptions, - { prefix, context: "Argument 2" }, + prefix, + "Argument 2", ); options = { transfer }; } else { options = webidl.converters.StructuredSerializeOptions( transferOrOptions, - { - prefix, - context: "Argument 2", - }, + prefix, + "Argument 2", ); } const { transfer } = options; From 89160e7cd8647fdf2ebaec45259775be89aa69c7 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 1 May 2023 18:07:32 +0530 Subject: [PATCH 092/320] chore(ext/websocket): readd autobahn|testsuite 
fuzzingclient (#18903) This reverts commit https://github.com/denoland/deno/commit/17d1c7e444542f43229a047853605ac22081abdf. The `Deno.serve` signature update in https://github.com/denoland/deno/pull/18759 broke the testee server right after this patch landed on `main`. --- .dprint.json | 7 +++-- .github/workflows/ci.generate.ts | 9 +++++++ .github/workflows/ci.yml | 5 ++++ .gitignore | 2 ++ ext/websocket/autobahn/autobahn_server.js | 20 ++++++++++++++ ext/websocket/autobahn/fuzzingclient.js | 33 +++++++++++++++++++++++ ext/websocket/autobahn/fuzzingclient.json | 26 ++++++++++++++++++ 7 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 ext/websocket/autobahn/autobahn_server.js create mode 100644 ext/websocket/autobahn/fuzzingclient.js create mode 100644 ext/websocket/autobahn/fuzzingclient.json diff --git a/.dprint.json b/.dprint.json index b8af9469f4..07124d6625 100644 --- a/.dprint.json +++ b/.dprint.json @@ -13,7 +13,9 @@ "associations": "**/*.rs", "rustfmt": "rustfmt --config imports_granularity=item" }, - "includes": ["**/*.{ts,tsx,js,jsx,json,md,toml,rs}"], + "includes": [ + "**/*.{ts,tsx,js,jsx,json,md,toml,rs}" + ], "excludes": [ ".cargo_home", ".git", @@ -48,7 +50,8 @@ "tools/node_compat/TODO.md", "tools/node_compat/versions", "tools/wpt/expectation.json", - "tools/wpt/manifest.json" + "tools/wpt/manifest.json", + "ext/websocket/autobahn/reports" ], "plugins": [ "https://plugins.dprint.dev/typescript-0.84.2.wasm", diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index ea9f93bc1c..b5fa91afb6 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -642,6 +642,15 @@ const ci = { run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/', }, + { + name: "Autobahn testsuite", + if: [ + "matrix.job == 'test' && matrix.profile == 'release' &&", + "!startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 
'ubuntu')", + ].join("\n"), + run: + "target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js", + }, { name: "Test debug", if: [ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 205e5c069f..9f2c788c25 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -399,6 +399,11 @@ jobs: env: CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe' run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/' + - name: Autobahn testsuite + if: |- + !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' && + !startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')) + run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js - name: Test debug if: |- !(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' && diff --git a/.gitignore b/.gitignore index 6f806b1433..a8738ea41d 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,5 @@ gclient_config.py_entries # WPT generated cert files /tools/wpt/certs/index.txt* /tools/wpt/certs/serial* + +/ext/websocket/autobahn/reports diff --git a/ext/websocket/autobahn/autobahn_server.js b/ext/websocket/autobahn/autobahn_server.js new file mode 100644 index 0000000000..b5f399a5b6 --- /dev/null +++ b/ext/websocket/autobahn/autobahn_server.js @@ -0,0 +1,20 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +import { parse } from "../../../test_util/std/flags/mod.ts"; + +const { port } = parse(Deno.args, { + number: ["port"], + default: { + port: 6969, + }, +}); + +const { serve } = Deno; + +// A message-based WebSocket echo server. 
+serve({ port }, (request) => { + const { socket, response } = Deno.upgradeWebSocket(request); + socket.onmessage = (event) => { + socket.send(event.data); + }; + return response; +}); diff --git a/ext/websocket/autobahn/fuzzingclient.js b/ext/websocket/autobahn/fuzzingclient.js new file mode 100644 index 0000000000..8aa7166958 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.js @@ -0,0 +1,33 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file + +import { $ } from "https://deno.land/x/dax@0.31.0/mod.ts"; + +const pwd = new URL(".", import.meta.url).pathname; + +const AUTOBAHN_TESTSUITE_DOCKER = + "crossbario/autobahn-testsuite:0.8.2@sha256:5d4ba3aa7d6ab2fdbf6606f3f4ecbe4b66f205ce1cbc176d6cdf650157e52242"; + +const self = Deno.execPath(); +$`${self} run -A --unstable ${pwd}/autobahn_server.js`.spawn(); +await $`docker run --name fuzzingserver -v ${pwd}/fuzzingclient.json:/fuzzingclient.json:ro -v ${pwd}/reports:/reports -p 9001:9001 --net=host --rm ${AUTOBAHN_TESTSUITE_DOCKER} wstest -m fuzzingclient -s fuzzingclient.json` + .cwd(pwd); + +const { deno_websocket } = JSON.parse( + Deno.readTextFileSync(`${pwd}/reports/servers/index.json`), +); +const result = Object.values(deno_websocket); + +function failed(name) { + return name != "OK" && name != "INFORMATIONAL" && name != "NON-STRICT"; +} + +const failedtests = result.filter((outcome) => failed(outcome.behavior)); + +console.log( + `%c${result.length - failedtests.length} / ${result.length} tests OK`, + `color: ${failedtests.length == 0 ? "green" : "red"}`, +); + +Deno.exit(failedtests.length == 0 ? 
0 : 1); diff --git a/ext/websocket/autobahn/fuzzingclient.json b/ext/websocket/autobahn/fuzzingclient.json new file mode 100644 index 0000000000..fcee80c993 --- /dev/null +++ b/ext/websocket/autobahn/fuzzingclient.json @@ -0,0 +1,26 @@ +{ + "outdir": "./reports/servers", + "servers": [ + { + "agent": "deno_websocket", + "url": "ws://localhost:6969" + } + ], + "cases": [ + "1.*", + "2.*", + "3.*", + "4.*", + "5.*", + "6.*", + "7.*", + "9.*", + "10.*" + ], + "exclude-cases": [ + "11.*", + "12.*", + "13.*" + ], + "exclude-agent-cases": {} +} From 94a148cdb6f7660518c75a3c20109bf64848f0f1 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 1 May 2023 08:59:38 -0400 Subject: [PATCH 093/320] refactor(cli): use CliMainWorker in standalone (#18880) Uses `CliMainWorker` in all the cli code. --- cli/args/mod.rs | 9 ++ cli/module_loader.rs | 230 ++++++++++++++++++-------------- cli/proc_state.rs | 17 ++- cli/standalone/mod.rs | 302 +++++++++++++----------------------------- cli/worker.rs | 101 +++++++++----- 5 files changed, 308 insertions(+), 351 deletions(-) diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 440403f62c..4038fb0998 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -1202,6 +1202,11 @@ impl StorageKeyResolver { }) } + /// Creates a storage key resolver that will always resolve to being empty. + pub fn empty() -> Self { + Self(Some(None)) + } + /// Resolves the storage key to use based on the current flags, config, or main module. 
pub fn resolve_storage_key( &self, @@ -1397,5 +1402,9 @@ mod test { resolver.resolve_storage_key(&specifier), Some("value".to_string()) ); + + // test empty + let resolver = StorageKeyResolver::empty(); + assert_eq!(resolver.resolve_storage_key(&specifier), None); } } diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 7de45af28a..d8a5b73c4d 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -20,6 +20,7 @@ use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; +use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; use deno_core::anyhow::anyhow; @@ -223,102 +224,14 @@ pub struct ModuleCodeSource { pub media_type: MediaType, } -struct SharedCliModuleLoaderState { - lib_window: TsTypeLib, - lib_worker: TsTypeLib, - is_inspecting: bool, - is_repl: bool, +struct PreparedModuleLoader { emitter: Arc, graph_container: Arc, - module_load_preparer: Arc, parsed_source_cache: Arc, - resolver: Arc, - npm_module_loader: NpmModuleLoader, } -pub struct CliModuleLoaderFactory { - state: Arc, -} - -impl CliModuleLoaderFactory { - pub fn new( - options: &CliOptions, - emitter: Arc, - graph_container: Arc, - module_load_preparer: Arc, - parsed_source_cache: Arc, - resolver: Arc, - npm_module_loader: NpmModuleLoader, - ) -> Self { - Self { - state: Arc::new(SharedCliModuleLoaderState { - lib_window: options.ts_type_lib_window(), - lib_worker: options.ts_type_lib_worker(), - is_inspecting: options.is_inspecting(), - is_repl: matches!(options.sub_command(), DenoSubcommand::Repl(_)), - emitter, - graph_container, - module_load_preparer, - parsed_source_cache, - resolver, - npm_module_loader, - }), - } - } - - pub fn create_for_main( - &self, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, - ) -> CliModuleLoader { - self.create_with_lib( - self.state.lib_window, - root_permissions, 
- dynamic_permissions, - ) - } - - pub fn create_for_worker( - &self, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, - ) -> CliModuleLoader { - self.create_with_lib( - self.state.lib_worker, - root_permissions, - dynamic_permissions, - ) - } - - fn create_with_lib( - &self, - lib: TsTypeLib, - root_permissions: PermissionsContainer, - dynamic_permissions: PermissionsContainer, - ) -> CliModuleLoader { - CliModuleLoader { - lib, - root_permissions, - dynamic_permissions, - shared: self.state.clone(), - } - } -} - -pub struct CliModuleLoader { - lib: TsTypeLib, - /// The initial set of permissions used to resolve the static imports in the - /// worker. These are "allow all" for main worker, and parent thread - /// permissions for Web Worker. - root_permissions: PermissionsContainer, - /// Permissions used to resolve dynamic imports, these get passed as - /// "root permissions" for Web Worker. - dynamic_permissions: PermissionsContainer, - shared: Arc, -} - -impl CliModuleLoader { - fn load_prepared_module( +impl PreparedModuleLoader { + pub fn load_prepared_module( &self, specifier: &ModuleSpecifier, maybe_referrer: Option<&ModuleSpecifier>, @@ -327,7 +240,7 @@ impl CliModuleLoader { unreachable!(); // Node built-in modules should be handled internally. } - let graph = self.shared.graph_container.graph(); + let graph = self.graph_container.graph(); match graph.get(specifier) { Some(deno_graph::Module::Json(JsonModule { source, @@ -360,11 +273,9 @@ impl CliModuleLoader { | MediaType::Jsx | MediaType::Tsx => { // get emit text - self.shared.emitter.emit_parsed_source( - specifier, - *media_type, - source, - )? + self + .emitter + .emit_parsed_source(specifier, *media_type, source)? 
} MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") @@ -372,7 +283,7 @@ impl CliModuleLoader { }; // at this point, we no longer need the parsed source in memory, so free it - self.shared.parsed_source_cache.free(specifier); + self.parsed_source_cache.free(specifier); Ok(ModuleCodeSource { code, @@ -389,7 +300,113 @@ impl CliModuleLoader { } } } +} +struct SharedCliModuleLoaderState { + lib_window: TsTypeLib, + lib_worker: TsTypeLib, + is_inspecting: bool, + is_repl: bool, + graph_container: Arc, + module_load_preparer: Arc, + prepared_module_loader: PreparedModuleLoader, + resolver: Arc, + npm_module_loader: NpmModuleLoader, +} + +pub struct CliModuleLoaderFactory { + shared: Arc, +} + +impl CliModuleLoaderFactory { + pub fn new( + options: &CliOptions, + emitter: Arc, + graph_container: Arc, + module_load_preparer: Arc, + parsed_source_cache: Arc, + resolver: Arc, + npm_module_loader: NpmModuleLoader, + ) -> Self { + Self { + shared: Arc::new(SharedCliModuleLoaderState { + lib_window: options.ts_type_lib_window(), + lib_worker: options.ts_type_lib_worker(), + is_inspecting: options.is_inspecting(), + is_repl: matches!(options.sub_command(), DenoSubcommand::Repl(_)), + prepared_module_loader: PreparedModuleLoader { + emitter, + graph_container: graph_container.clone(), + parsed_source_cache, + }, + graph_container, + module_load_preparer, + resolver, + npm_module_loader, + }), + } + } + + fn create_with_lib( + &self, + lib: TsTypeLib, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc { + Rc::new(CliModuleLoader { + lib, + root_permissions, + dynamic_permissions, + shared: self.shared.clone(), + }) + } +} + +impl ModuleLoaderFactory for CliModuleLoaderFactory { + fn create_for_main( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc { + self.create_with_lib( + self.shared.lib_window, + 
root_permissions, + dynamic_permissions, + ) + } + + fn create_for_worker( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc { + self.create_with_lib( + self.shared.lib_worker, + root_permissions, + dynamic_permissions, + ) + } + + fn create_source_map_getter(&self) -> Option> { + Some(Box::new(CliSourceMapGetter { + shared: self.shared.clone(), + })) + } +} + +struct CliModuleLoader { + lib: TsTypeLib, + /// The initial set of permissions used to resolve the static imports in the + /// worker. These are "allow all" for main worker, and parent thread + /// permissions for Web Worker. + root_permissions: PermissionsContainer, + /// Permissions used to resolve dynamic imports, these get passed as + /// "root permissions" for Web Worker. + dynamic_permissions: PermissionsContainer, + shared: Arc, +} + +impl CliModuleLoader { fn load_sync( &self, specifier: &ModuleSpecifier, @@ -409,7 +426,10 @@ impl CliModuleLoader { )? { code_source } else { - self.load_prepared_module(specifier, maybe_referrer)? + self + .shared + .prepared_module_loader + .load_prepared_module(specifier, maybe_referrer)? 
}; let code = if self.shared.is_inspecting { // we need the code with the source map in order for @@ -584,7 +604,11 @@ impl ModuleLoader for CliModuleLoader { } } -impl SourceMapGetter for CliModuleLoader { +struct CliSourceMapGetter { + shared: Arc, +} + +impl SourceMapGetter for CliSourceMapGetter { fn get_source_map(&self, file_name: &str) -> Option> { let specifier = resolve_url(file_name).ok()?; match specifier.scheme() { @@ -593,7 +617,11 @@ impl SourceMapGetter for CliModuleLoader { "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self.load_prepared_module(&specifier, None).ok()?; + let source = self + .shared + .prepared_module_loader + .load_prepared_module(&specifier, None) + .ok()?; source_map_from_code(&source.code) } diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 321bf45c3f..6c1a5e7c56 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -34,6 +34,7 @@ use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; +use crate::worker::HasNodeSpecifierChecker; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; @@ -334,9 +335,9 @@ impl ProcState { StorageKeyResolver::from_options(&self.options), self.npm_resolver.clone(), self.node_resolver.clone(), - self.graph_container.clone(), + Box::new(CliHasNodeSpecifierChecker(self.graph_container.clone())), self.blob_store.clone(), - CliModuleLoaderFactory::new( + Box::new(CliModuleLoaderFactory::new( &self.options, self.emitter.clone(), self.graph_container.clone(), @@ -348,7 +349,7 @@ impl ProcState { self.node_code_translator.clone(), self.node_resolver.clone(), ), - ), + )), self.root_cert_store.clone(), self.node_fs.clone(), self.maybe_inspector_server.clone(), @@ -383,7 +384,7 @@ impl ProcState { } maybe_binary_command_name }, - origin_data_folder_path: self.dir.origin_data_folder_path(), + origin_data_folder_path: 
Some(self.dir.origin_data_folder_path()), seed: self.options.seed(), unsafely_ignore_certificate_errors: self .options @@ -395,6 +396,14 @@ impl ProcState { } } +struct CliHasNodeSpecifierChecker(Arc); + +impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { + fn has_node_specifier(&self) -> bool { + self.0.graph().has_node_specifier + } +} + /// Keeps track of what module specifiers were resolved as CJS. #[derive(Default)] pub struct CjsResolutionStore(Mutex>); diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index d0126168d8..556346535b 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -3,8 +3,8 @@ use crate::args::get_root_cert_store; use crate::args::CaData; use crate::args::CacheSetting; +use crate::args::StorageKeyResolver; use crate::cache::DenoDir; -use crate::colors; use crate::file_fetcher::get_source_from_data_url; use crate::http_util::HttpClient; use crate::npm::create_npm_fs_resolver; @@ -12,42 +12,30 @@ use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; use crate::npm::NpmCache; use crate::npm::NpmResolution; -use crate::ops; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; -use crate::version; +use crate::worker::CliMainWorkerFactory; +use crate::worker::CliMainWorkerOptions; +use crate::worker::HasNodeSpecifierChecker; +use crate::worker::ModuleLoaderFactory; use crate::CliGraphResolver; use deno_core::anyhow::Context; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::task::LocalFutureObj; use deno_core::futures::FutureExt; -use deno_core::located_script_name; use deno_core::v8_set_flags; -use deno_core::CompiledWasmModuleStore; use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::ResolutionKind; -use deno_core::SharedArrayBufferStore; use deno_graph::source::Resolver; -use 
deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node; -use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_web::BlobStore; -use deno_runtime::fmt_errors::format_js_error; -use deno_runtime::ops::worker_host::CreateWebWorkerCb; -use deno_runtime::ops::worker_host::WorkerEventCb; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::web_worker::WebWorker; -use deno_runtime::web_worker::WebWorkerOptions; -use deno_runtime::worker::MainWorker; -use deno_runtime::worker::WorkerOptions; -use deno_runtime::BootstrapOptions; use import_map::parse_from_json; -use log::Level; use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; @@ -137,95 +125,40 @@ impl ModuleLoader for EmbeddedModuleLoader { } } -fn web_worker_callback() -> Arc { - Arc::new(|worker| { - let fut = async move { Ok(worker) }; - LocalFutureObj::new(Box::new(fut)) - }) +struct StandaloneModuleLoaderFactory { + loader: EmbeddedModuleLoader, } -struct SharedWorkerState { - npm_resolver: Arc, - root_cert_store: RootCertStore, - node_fs: Arc, - blob_store: BlobStore, - broadcast_channel: InMemoryBroadcastChannel, - shared_array_buffer_store: SharedArrayBufferStore, - compiled_wasm_module_store: CompiledWasmModuleStore, - // options - argv: Vec, - seed: Option, - unsafely_ignore_certificate_errors: Option>, - unstable: bool, +impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { + fn create_for_main( + &self, + _root_permissions: PermissionsContainer, + _dynamic_permissions: PermissionsContainer, + ) -> Rc { + Rc::new(self.loader.clone()) + } + + fn create_for_worker( + &self, + _root_permissions: PermissionsContainer, + _dynamic_permissions: PermissionsContainer, + ) -> Rc { + Rc::new(self.loader.clone()) + } + + fn create_source_map_getter( + &self, + ) -> Option> { + None + } } -fn create_web_worker_callback( - shared: &Arc, - 
module_loader: &EmbeddedModuleLoader, -) -> Arc { - let shared = shared.clone(); - let module_loader = module_loader.clone(); - Arc::new(move |args| { - let module_loader = Rc::new(module_loader.clone()); +struct StandaloneHasNodeSpecifierChecker; - let create_web_worker_cb = - create_web_worker_callback(&shared, &module_loader); - let web_worker_cb = web_worker_callback(); - - let options = WebWorkerOptions { - bootstrap: BootstrapOptions { - args: shared.argv.clone(), - cpu_count: std::thread::available_parallelism() - .map(|p| p.get()) - .unwrap_or(1), - debug_flag: false, - enable_testing_features: false, - locale: deno_core::v8::icu::get_language_tag(), - location: Some(args.main_module.clone()), - no_color: !colors::use_color(), - is_tty: colors::is_tty(), - runtime_version: version::deno().to_string(), - ts_version: version::TYPESCRIPT.to_string(), - unstable: shared.unstable, - user_agent: version::get_user_agent().to_string(), - inspect: false, - }, - extensions: ops::cli_exts(shared.npm_resolver.clone()), - startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: shared - .unsafely_ignore_certificate_errors - .clone(), - root_cert_store: Some(shared.root_cert_store.clone()), - seed: shared.seed, - module_loader, - node_fs: Some(shared.node_fs.clone()), - npm_resolver: None, // not currently supported - create_web_worker_cb, - preload_module_cb: web_worker_cb.clone(), - pre_execute_module_cb: web_worker_cb, - format_js_error_fn: Some(Arc::new(format_js_error)), - source_map_getter: None, - worker_type: args.worker_type, - maybe_inspector_server: None, - get_error_class_fn: Some(&get_error_class_name), - blob_store: shared.blob_store.clone(), - broadcast_channel: shared.broadcast_channel.clone(), - shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some( - shared.compiled_wasm_module_store.clone(), - ), - cache_storage_dir: None, - stdio: Default::default(), - }; - - 
WebWorker::bootstrap_from_options( - args.name, - args.permissions, - args.main_module, - args.worker_id, - options, - ) - }) +impl HasNodeSpecifierChecker for StandaloneHasNodeSpecifierChecker { + fn has_node_specifier(&self) -> bool { + false + } } pub async fn run( @@ -273,121 +206,68 @@ pub async fn run( npm_fs_resolver, None, )); + let node_resolver = + Arc::new(NodeResolver::new(node_fs.clone(), npm_resolver.clone())); + let module_loader_factory = StandaloneModuleLoaderFactory { + loader: EmbeddedModuleLoader { + eszip: Arc::new(eszip), + maybe_import_map_resolver: metadata.maybe_import_map.map( + |(base, source)| { + Arc::new(CliGraphResolver::new( + None, + Some(Arc::new( + parse_from_json(&base, &source).unwrap().import_map, + )), + false, + npm_api.clone(), + npm_resolution.clone(), + Default::default(), + )) + }, + ), + }, + }; - let shared = Arc::new(SharedWorkerState { - npm_resolver, + let worker_factory = CliMainWorkerFactory::new( + StorageKeyResolver::empty(), + npm_resolver.clone(), + node_resolver, + Box::new(StandaloneHasNodeSpecifierChecker), + BlobStore::default(), + Box::new(module_loader_factory), root_cert_store, node_fs, - blob_store: BlobStore::default(), - broadcast_channel: InMemoryBroadcastChannel::default(), - shared_array_buffer_store: SharedArrayBufferStore::default(), - compiled_wasm_module_store: CompiledWasmModuleStore::default(), - argv: metadata.argv, - seed: metadata.seed, - unsafely_ignore_certificate_errors: metadata - .unsafely_ignore_certificate_errors, - unstable: metadata.unstable, - }); + None, + CliMainWorkerOptions { + argv: metadata.argv, + debug: false, + coverage_dir: None, + enable_testing_features: false, + has_node_modules_dir: false, + inspect_brk: false, + inspect_wait: false, + is_inspecting: false, + is_npm_main: false, + location: metadata.location, + // todo(dsherret): support a binary command being compiled + maybe_binary_npm_command_name: None, + origin_data_folder_path: None, + seed: metadata.seed, + 
unsafely_ignore_certificate_errors: metadata + .unsafely_ignore_certificate_errors, + unstable: metadata.unstable, + }, + ); + + v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![])); let permissions = PermissionsContainer::new(Permissions::from_options( &metadata.permissions, )?); - let module_loader = EmbeddedModuleLoader { - eszip: Arc::new(eszip), - maybe_import_map_resolver: metadata.maybe_import_map.map( - |(base, source)| { - Arc::new(CliGraphResolver::new( - None, - Some(Arc::new( - parse_from_json(&base, &source).unwrap().import_map, - )), - false, - npm_api.clone(), - npm_resolution.clone(), - Default::default(), - )) - }, - ), - }; - let create_web_worker_cb = - create_web_worker_callback(&shared, &module_loader); - let web_worker_cb = web_worker_callback(); + let mut worker = worker_factory + .create_main_worker(main_module.clone(), permissions) + .await?; - v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![])); - - let options = WorkerOptions { - bootstrap: BootstrapOptions { - args: shared.argv.clone(), - cpu_count: std::thread::available_parallelism() - .map(|p| p.get()) - .unwrap_or(1), - debug_flag: metadata - .log_level - .map(|l| l == Level::Debug) - .unwrap_or(false), - enable_testing_features: false, - locale: deno_core::v8::icu::get_language_tag(), - location: metadata.location, - no_color: !colors::use_color(), - is_tty: colors::is_tty(), - runtime_version: version::deno().to_string(), - ts_version: version::TYPESCRIPT.to_string(), - unstable: metadata.unstable, - user_agent: version::get_user_agent().to_string(), - inspect: false, - }, - extensions: ops::cli_exts(shared.npm_resolver.clone()), - startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: shared - .unsafely_ignore_certificate_errors - .clone(), - root_cert_store: Some(shared.root_cert_store.clone()), - seed: metadata.seed, - source_map_getter: None, - format_js_error_fn: Some(Arc::new(format_js_error)), - create_web_worker_cb, - 
web_worker_preload_module_cb: web_worker_cb.clone(), - web_worker_pre_execute_module_cb: web_worker_cb, - maybe_inspector_server: None, - should_break_on_first_statement: false, - should_wait_for_inspector_session: false, - module_loader: Rc::new(module_loader), - node_fs: Some(shared.node_fs.clone()), - npm_resolver: None, // not currently supported - get_error_class_fn: Some(&get_error_class_name), - cache_storage_dir: None, - origin_storage_dir: None, - blob_store: shared.blob_store.clone(), - broadcast_channel: shared.broadcast_channel.clone(), - shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), - compiled_wasm_module_store: Some(shared.compiled_wasm_module_store.clone()), - stdio: Default::default(), - }; - let mut worker = MainWorker::bootstrap_from_options( - main_module.clone(), - permissions, - options, - ); - worker.execute_main_module(main_module).await?; - worker.dispatch_load_event(located_script_name!())?; - - loop { - worker.run_event_loop(false).await?; - if !worker.dispatch_beforeunload_event(located_script_name!())? 
{ - break; - } - } - - worker.dispatch_unload_event(located_script_name!())?; - std::process::exit(0); -} - -fn get_error_class_name(e: &AnyError) -> &'static str { - deno_runtime::errors::get_error_class_name(e).unwrap_or_else(|| { - panic!( - "Error '{}' contains boxed error of unsupported type:{}", - e, - e.chain().map(|e| format!("\n {e:?}")).collect::() - ); - }) + let exit_code = worker.run().await?; + std::process::exit(exit_code) } diff --git a/cli/worker.rs b/cli/worker.rs index 3dad2fbe14..1beaa27baf 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -13,7 +13,9 @@ use deno_core::url::Url; use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::ModuleId; +use deno_core::ModuleLoader; use deno_core::SharedArrayBufferStore; +use deno_core::SourceMapGetter; use deno_runtime::colors; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node; @@ -35,8 +37,6 @@ use deno_semver::npm::NpmPackageReqReference; use crate::args::StorageKeyResolver; use crate::errors; -use crate::graph_util::ModuleGraphContainer; -use crate::module_loader::CliModuleLoaderFactory; use crate::npm::CliNpmResolver; use crate::ops; use crate::tools; @@ -44,6 +44,28 @@ use crate::tools::coverage::CoverageCollector; use crate::util::checksum; use crate::version; +pub trait ModuleLoaderFactory: Send + Sync { + fn create_for_main( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc; + + fn create_for_worker( + &self, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, + ) -> Rc; + + fn create_source_map_getter(&self) -> Option>; +} + +// todo(dsherret): this is temporary and we should remove this +// once we no longer conditionally initialize the node runtime +pub trait HasNodeSpecifierChecker: Send + Sync { + fn has_node_specifier(&self) -> bool; +} + pub struct CliMainWorkerOptions { pub argv: Vec, pub debug: bool, @@ -56,26 +78,34 @@ pub 
struct CliMainWorkerOptions { pub is_npm_main: bool, pub location: Option, pub maybe_binary_npm_command_name: Option, - pub origin_data_folder_path: PathBuf, + pub origin_data_folder_path: Option, pub seed: Option, pub unsafely_ignore_certificate_errors: Option>, pub unstable: bool, } struct SharedWorkerState { - pub options: CliMainWorkerOptions, - pub storage_key_resolver: StorageKeyResolver, - pub npm_resolver: Arc, - pub node_resolver: Arc, - pub graph_container: Arc, - pub blob_store: BlobStore, - pub broadcast_channel: InMemoryBroadcastChannel, - pub shared_array_buffer_store: SharedArrayBufferStore, - pub compiled_wasm_module_store: CompiledWasmModuleStore, - pub module_loader_factory: CliModuleLoaderFactory, - pub root_cert_store: RootCertStore, - pub node_fs: Arc, - pub maybe_inspector_server: Option>, + options: CliMainWorkerOptions, + storage_key_resolver: StorageKeyResolver, + npm_resolver: Arc, + node_resolver: Arc, + has_node_specifier_checker: Box, + blob_store: BlobStore, + broadcast_channel: InMemoryBroadcastChannel, + shared_array_buffer_store: SharedArrayBufferStore, + compiled_wasm_module_store: CompiledWasmModuleStore, + module_loader_factory: Box, + root_cert_store: RootCertStore, + node_fs: Arc, + maybe_inspector_server: Option>, +} + +impl SharedWorkerState { + pub fn should_initialize_node_runtime(&self) -> bool { + self.npm_resolver.has_packages() + || self.has_node_specifier_checker.has_node_specifier() + || self.options.is_npm_main + } } pub struct CliMainWorker { @@ -227,9 +257,7 @@ impl CliMainWorker { &mut self, id: ModuleId, ) -> Result<(), AnyError> { - if self.shared.npm_resolver.has_packages() - || self.shared.graph_container.graph().has_node_specifier - { + if self.shared.should_initialize_node_runtime() { self.initialize_main_module_for_node()?; } self.worker.evaluate_module(id).await @@ -275,9 +303,9 @@ impl CliMainWorkerFactory { storage_key_resolver: StorageKeyResolver, npm_resolver: Arc, node_resolver: Arc, - 
graph_container: Arc, + has_node_specifier_checker: Box, blob_store: BlobStore, - module_loader_factory: CliModuleLoaderFactory, + module_loader_factory: Box, root_cert_store: RootCertStore, node_fs: Arc, maybe_inspector_server: Option>, @@ -289,7 +317,7 @@ impl CliMainWorkerFactory { storage_key_resolver, npm_resolver, node_resolver, - graph_container, + has_node_specifier_checker, blob_store, broadcast_channel: Default::default(), shared_array_buffer_store: Default::default(), @@ -345,11 +373,11 @@ impl CliMainWorkerFactory { (main_module, false) }; - let module_loader = - Rc::new(shared.module_loader_factory.create_for_main( - PermissionsContainer::allow_all(), - permissions.clone(), - )); + let module_loader = shared + .module_loader_factory + .create_for_main(PermissionsContainer::allow_all(), permissions.clone()); + let maybe_source_map_getter = + shared.module_loader_factory.create_source_map_getter(); let maybe_inspector_server = shared.maybe_inspector_server.clone(); let create_web_worker_cb = @@ -366,6 +394,8 @@ impl CliMainWorkerFactory { shared .options .origin_data_folder_path + .as_ref() + .unwrap() // must be set if storage key resolver returns a value .join(checksum::gen(&[key.as_bytes()])) }); let cache_storage_dir = maybe_storage_key.map(|key| { @@ -405,7 +435,7 @@ impl CliMainWorkerFactory { .clone(), root_cert_store: Some(shared.root_cert_store.clone()), seed: shared.options.seed, - source_map_getter: Some(Box::new(module_loader.clone())), + source_map_getter: maybe_source_map_getter, format_js_error_fn: Some(Arc::new(format_js_error)), create_web_worker_cb, web_worker_preload_module_cb, @@ -461,7 +491,7 @@ fn create_web_worker_pre_execute_module_callback( let shared = shared.clone(); let fut = async move { // this will be up to date after pre-load - if shared.npm_resolver.has_packages() { + if shared.should_initialize_node_runtime() { deno_node::initialize_runtime( &mut worker.js_runtime, shared.options.has_node_modules_dir, @@ -482,11 +512,12 
@@ fn create_web_worker_callback( Arc::new(move |args| { let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let module_loader = - Rc::new(shared.module_loader_factory.create_for_worker( - args.parent_permissions.clone(), - args.permissions.clone(), - )); + let module_loader = shared.module_loader_factory.create_for_worker( + args.parent_permissions.clone(), + args.permissions.clone(), + ); + let maybe_source_map_getter = + shared.module_loader_factory.create_source_map_getter(); let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); let preload_module_cb = create_web_worker_preload_module_callback(&shared); @@ -536,7 +567,7 @@ fn create_web_worker_callback( preload_module_cb, pre_execute_module_cb, format_js_error_fn: Some(Arc::new(format_js_error)), - source_map_getter: Some(Box::new(module_loader.clone())), + source_map_getter: maybe_source_map_getter, module_loader, node_fs: Some(shared.node_fs.clone()), npm_resolver: Some(shared.npm_resolver.clone()), From 6728ad4203d731e555dabf89ec6157f113454ce6 Mon Sep 17 00:00:00 2001 From: Kenta Moriuchi Date: Mon, 1 May 2023 22:30:02 +0900 Subject: [PATCH 094/320] fix(core): Use primordials for methods (#18839) I would like to get this change into Deno before merging https://github.com/denoland/deno_lint/pull/1152 --- core/01_core.js | 2 +- ext/cache/01_cache.js | 11 +- ext/console/01_console.js | 427 +++++++++++++++------------- ext/crypto/00_crypto.js | 24 +- ext/fetch/23_request.js | 7 +- ext/fs/30_fs.js | 9 +- ext/http/00_serve.js | 5 +- ext/http/01_http.js | 12 +- ext/net/01_net.js | 19 +- ext/url/00_url.js | 61 +++- ext/url/01_urlpattern.js | 5 +- ext/web/06_streams.js | 28 +- ext/web/13_message_port.js | 2 +- ext/websocket/02_websocketstream.js | 7 +- runtime/js/11_workers.js | 8 +- runtime/js/30_os.js | 4 +- runtime/js/99_main.js | 17 +- 17 files changed, 378 insertions(+), 270 deletions(-) diff --git a/core/01_core.js b/core/01_core.js index 3972dec333..72cbe31f71 
100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -28,13 +28,13 @@ SafeArrayIterator, SafeMap, SafePromisePrototypeFinally, - setQueueMicrotask, StringPrototypeSlice, StringPrototypeSplit, SymbolFor, SyntaxError, TypeError, URIError, + setQueueMicrotask, } = window.__bootstrap.primordials; const { ops, asyncOps } = window.Deno.core; diff --git a/ext/cache/01_cache.js b/ext/cache/01_cache.js index 8cbf540fa7..9b5404acbb 100644 --- a/ext/cache/01_cache.js +++ b/ext/cache/01_cache.js @@ -4,9 +4,12 @@ const core = globalThis.Deno.core; import * as webidl from "ext:deno_webidl/00_webidl.js"; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypePush, + ObjectPrototypeIsPrototypeOf, + StringPrototypeSplit, + StringPrototypeTrim, Symbol, TypeError, - ObjectPrototypeIsPrototypeOf, } = primordials; import { Request, @@ -101,10 +104,10 @@ class Cache { // Step 7. const varyHeader = getHeader(innerResponse.headerList, "vary"); if (varyHeader) { - const fieldValues = varyHeader.split(","); + const fieldValues = StringPrototypeSplit(varyHeader, ","); for (let i = 0; i < fieldValues.length; ++i) { const field = fieldValues[i]; - if (field.trim() === "*") { + if (StringPrototypeTrim(field) === "*") { throw new TypeError("Vary header must not contain '*'"); } } @@ -258,7 +261,7 @@ class Cache { statusText: meta.responseStatusText, }, ); - responses.push(response); + ArrayPrototypePush(responses, response); } } // Step 5.4-5.5: don't apply in this context. 
diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 318cf9cb42..3b2f449178 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -6,118 +6,134 @@ const core = globalThis.Deno.core; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { - DateNow, - Boolean, - ObjectKeys, - ObjectAssign, - ObjectCreate, - ObjectFreeze, - ObjectValues, - ObjectFromEntries, - ObjectPrototypeHasOwnProperty, - ObjectPrototypeIsPrototypeOf, - ObjectDefineProperty, - String, - SafeStringIterator, - DatePrototype, - MapPrototypeEntries, - SetPrototypeGetSize, - StringPrototypeRepeat, - StringPrototypeEndsWith, - StringPrototypeIndexOf, - RegExpPrototypeExec, - RegExpPrototypeSymbolReplace, - StringPrototypeReplace, - StringPrototypeReplaceAll, - ObjectPrototype, - FunctionPrototypeCall, - StringPrototypeSplit, - StringPrototypeSlice, - StringPrototypeCharCodeAt, - MathFloor, - StringPrototypePadEnd, - ObjectGetOwnPropertySymbols, - ObjectGetOwnPropertyNames, - SymbolPrototypeGetDescription, - SymbolPrototypeToString, - ArrayPrototypePushApply, - ObjectPrototypePropertyIsEnumerable, - StringPrototypeMatch, - StringPrototypePadStart, - StringPrototypeTrim, - StringPrototypeIncludes, - NumberIsInteger, - NumberParseInt, - SafeArrayIterator, - SafeMap, - ArrayPrototypeShift, AggregateErrorPrototype, - RegExpPrototypeTest, - ObjectPrototypeToString, - ArrayPrototypeSort, - ArrayPrototypeUnshift, - DatePrototypeGetTime, - DatePrototypeToISOString, - SafeRegExp, - SetPrototype, - Symbol, - SymbolToStringTag, - SymbolHasInstance, - SymbolFor, - ObjectGetOwnPropertyDescriptor, - ObjectIs, - Uint8Array, - isNaN, - TypedArrayPrototypeGetSymbolToStringTag, - TypedArrayPrototypeGetLength, - ReflectOwnKeys, Array, - RegExpPrototypeToString, - ArrayIsArray, - SymbolIterator, ArrayBufferIsView, - ArrayPrototypeJoin, - ArrayPrototypeMap, - ArrayPrototypeReduce, - ObjectSetPrototypeOf, - ArrayPrototypePush, 
- ArrayPrototypeIncludes, + ArrayBufferPrototypeGetByteLength, + ArrayIsArray, ArrayPrototypeFill, ArrayPrototypeFilter, ArrayPrototypeFind, - FunctionPrototypeBind, - MapPrototype, - MapPrototypeHas, - MapPrototypeGet, - MapPrototypeSet, - MapPrototypeDelete, - MapPrototypeForEach, - MapPrototypeGetSize, + ArrayPrototypeForEach, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeReduce, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeSort, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + BigIntPrototypeValueOf, + Boolean, + BooleanPrototypeValueOf, + DateNow, + DatePrototype, + DatePrototypeGetTime, + DatePrototypeToISOString, Error, - ErrorPrototype, ErrorCaptureStackTrace, - MathSqrt, + ErrorPrototype, + FunctionPrototypeBind, + FunctionPrototypeCall, + FunctionPrototypeToString, + MapPrototype, + MapPrototypeDelete, + MapPrototypeEntries, + MapPrototypeForEach, + MapPrototypeGet, + MapPrototypeGetSize, + MapPrototypeHas, + MapPrototypeSet, MathAbs, + MathFloor, MathMax, MathMin, MathRound, + MathSqrt, Number, + NumberIsInteger, + NumberParseInt, NumberPrototypeToString, + NumberPrototypeValueOf, + ObjectAssign, + ObjectCreate, + ObjectDefineProperty, + ObjectFreeze, + ObjectFromEntries, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectGetOwnPropertySymbols, + ObjectGetPrototypeOf, + ObjectIs, + ObjectKeys, + ObjectPrototype, + ObjectPrototypeHasOwnProperty, + ObjectPrototypeIsPrototypeOf, + ObjectPrototypePropertyIsEnumerable, + ObjectPrototypeToString, + ObjectSetPrototypeOf, + ObjectValues, Proxy, ReflectGet, ReflectGetOwnPropertyDescriptor, ReflectGetPrototypeOf, ReflectHas, - BigIntPrototypeValueOf, - ObjectGetPrototypeOf, - FunctionPrototypeToString, - StringPrototypeStartsWith, - SetPrototypeValues, + ReflectOwnKeys, + RegExpPrototypeExec, + RegExpPrototypeSymbolReplace, + RegExpPrototypeTest, + 
RegExpPrototypeToString, + SafeArrayIterator, + SafeMap, + SafeMapIterator, + SafeRegExp, SafeSet, SafeSetIterator, + SafeStringIterator, + SetPrototype, + SetPrototypeAdd, + SetPrototypeHas, + SetPrototypeGetSize, + SetPrototypeValues, + String, + StringPrototypeCharCodeAt, + StringPrototypeCodePointAt, + StringPrototypeEndsWith, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeLastIndexOf, + StringPrototypeMatch, + StringPrototypeNormalize, + StringPrototypePadEnd, + StringPrototypePadStart, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeReplaceAll, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeStartsWith, + StringPrototypeToLowerCase, + StringPrototypeTrim, + StringPrototypeValueOf, + Symbol, + SymbolFor, + SymbolHasInstance, + SymbolIterator, + SymbolPrototypeGetDescription, + SymbolPrototypeToString, + SymbolPrototypeValueOf, + SymbolToStringTag, TypedArrayPrototypeGetByteLength, - SafeMapIterator, - ArrayBufferPrototype, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeGetSymbolToStringTag, + Uint8Array, + WeakMapPrototypeHas, + WeakSetPrototypeHas, + isNaN, } = primordials; let noColor = false; @@ -227,45 +243,9 @@ defineColorAlias("inverse", "swapColors"); defineColorAlias("inverse", "swapcolors"); defineColorAlias("doubleunderline", "doubleUnderline"); -// https://tc39.es/ecma262/#sec-boolean.prototype.valueof -const _booleanValueOf = Boolean.prototype.valueOf; - -// https://tc39.es/ecma262/#sec-number.prototype.valueof -const _numberValueOf = Number.prototype.valueOf; - -// https://tc39.es/ecma262/#sec-string.prototype.valueof -const _stringValueOf = String.prototype.valueOf; - -// https://tc39.es/ecma262/#sec-symbol.prototype.valueof -const _symbolValueOf = Symbol.prototype.valueOf; - -// https://tc39.es/ecma262/#sec-weakmap.prototype.has -const _weakMapHas = WeakMap.prototype.has; - -// https://tc39.es/ecma262/#sec-weakset.prototype.has -const _weakSetHas = WeakSet.prototype.has; - -// 
https://tc39.es/ecma262/#sec-get-arraybuffer.prototype.bytelength -const _getArrayBufferByteLength = ObjectGetOwnPropertyDescriptor( - ArrayBufferPrototype, - "byteLength", -).get; - // https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.bytelength let _getSharedArrayBufferByteLength; -// https://tc39.es/ecma262/#sec-get-set.prototype.size -const _getSetSize = ObjectGetOwnPropertyDescriptor( - SetPrototype, - "size", -).get; - -// https://tc39.es/ecma262/#sec-get-map.prototype.size -const _getMapSize = ObjectGetOwnPropertyDescriptor( - MapPrototype, - "size", -).get; - function isObjectLike(value) { return value !== null && typeof value === "object"; } @@ -284,7 +264,7 @@ export function isArgumentsObject(value) { export function isArrayBuffer(value) { try { - _getArrayBufferByteLength.call(value); + ArrayBufferPrototypeGetByteLength(value); return true; } catch { return false; @@ -311,7 +291,7 @@ export function isBooleanObject(value) { } try { - _booleanValueOf.call(value); + BooleanPrototypeValueOf(value); return true; } catch { return false; @@ -352,7 +332,7 @@ export function isGeneratorFunction( export function isMap(value) { try { - _getMapSize.call(value); + MapPrototypeGetSize(value); return true; } catch { return false; @@ -391,7 +371,7 @@ export function isNumberObject(value) { } try { - _numberValueOf.call(value); + NumberPrototypeValueOf(value); return true; } catch { return false; @@ -427,7 +407,7 @@ export function isRegExp(value) { export function isSet(value) { try { - _getSetSize.call(value); + SetPrototypeGetSize(value); return true; } catch { return false; @@ -454,7 +434,7 @@ export function isSharedArrayBuffer( ).get; try { - _getSharedArrayBufferByteLength.call(value); + FunctionPrototypeCall(_getSharedArrayBufferByteLength, value); return true; } catch { return false; @@ -467,7 +447,7 @@ export function isStringObject(value) { } try { - _stringValueOf.call(value); + StringPrototypeValueOf(value); return true; } catch { return false; 
@@ -480,7 +460,7 @@ export function isSymbolObject(value) { } try { - _symbolValueOf.call(value); + SymbolPrototypeValueOf(value); return true; } catch { return false; @@ -491,7 +471,7 @@ export function isWeakMap( value, ) { try { - _weakMapHas.call(value, null); + WeakMapPrototypeHas(value, null); return true; } catch { return false; @@ -502,7 +482,7 @@ export function isWeakSet( value, ) { try { - _weakSetHas.call(value, null); + WeakSetPrototypeHas(value, null); return true; } catch { return false; @@ -552,7 +532,7 @@ const keyStrRegExp = new SafeRegExp("^[a-zA-Z_][a-zA-Z_0-9]*$"); const numberRegExp = new SafeRegExp("^(0|[1-9][0-9]*)$"); // TODO(wafuwafu13): Figure out -const escapeFn = (str) => meta[str.charCodeAt(0)]; +const escapeFn = (str) => meta[StringPrototypeCharCodeAt(str, 0)]; function stylizeNoColor(str) { return str; @@ -711,16 +691,16 @@ function formatValue( // Using an array here is actually better for the average case than using // a Set. `seen` will only check for the depth and will never grow too large. - if (ctx.seen.includes(value)) { + if (ArrayPrototypeIncludes(ctx.seen, value)) { let index = 1; if (ctx.circular === undefined) { ctx.circular = new SafeMap(); - ctx.circular.set(value, index); + MapPrototypeSet(ctx.circular, value, index); } else { index = ctx.circular.get(value); if (index === undefined) { index = ctx.circular.size + 1; - ctx.circular.set(value, index); + MapPrototypeSet(ctx.circular, value, index); } } return ctx.stylize(`[Circular *${index}]`, "special"); @@ -1006,7 +986,7 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { } else if (isModuleNamespaceObject(value)) { braces[0] = `${getPrefix(constructor, tag, "Module")}{`; // Special handle keys for namespace objects. 
- formatter = formatNamespaceObject.bind(null, keys); + formatter = FunctionPrototypeBind(formatNamespaceObject, null, keys); } else if (isBoxedPrimitive(value)) { base = getBoxedBase(value, ctx, keys, constructor, tag); if (keys.length === 0 && protoProps === undefined) { @@ -1039,7 +1019,7 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { } recurseTimes += 1; - ctx.seen.push(value); + ArrayPrototypePush(ctx.seen, value); ctx.currentDepth = recurseTimes; let output; const indentationLvl = ctx.indentationLvl; @@ -1075,15 +1055,19 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { } } } - ctx.seen.pop(); + ArrayPrototypePop(ctx.seen); if (ctx.sorted) { const comparator = ctx.sorted === true ? undefined : ctx.sorted; if (extrasType === kObjectType) { output = ArrayPrototypeSort(output, comparator); } else if (keys.length > 1) { - const sorted = output.slice(output.length - keys.length).sort(comparator); - output.splice( + const sorted = ArrayPrototypeSort( + ArrayPrototypeSlice(output, output.length - keys.length), + comparator, + ); + ArrayPrototypeSplice( + output, output.length - keys.length, keys.length, ...new SafeArrayIterator(sorted), @@ -1118,8 +1102,9 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { const builtInObjectsRegExp = new SafeRegExp("^[A-Z][a-zA-Z0-9]+$"); const builtInObjects = new SafeSet( - ObjectGetOwnPropertyNames(globalThis).filter((e) => - builtInObjectsRegExp.test(e) + ArrayPrototypeFilter( + ObjectGetOwnPropertyNames(globalThis), + (e) => RegExpPrototypeTest(builtInObjectsRegExp, e), ), ); @@ -1145,7 +1130,7 @@ function addPrototypeProperties( if ( descriptor !== undefined && typeof descriptor.value === "function" && - builtInObjects.has(descriptor.value.name) + SetPrototypeHas(builtInObjects, descriptor.value.name) ) { return; } @@ -1154,18 +1139,17 @@ function addPrototypeProperties( if (depth === 0) { keySet = new SafeSet(); } else { - 
Array.prototype.forEach.call(keys, (key) => keySet.add(key)); + ArrayPrototypeForEach(keys, (key) => SetPrototypeAdd(keySet, key)); } // Get all own property names and symbols. keys = ReflectOwnKeys(obj); - Array.prototype.push.call(ctx.seen, main); + ArrayPrototypePush(ctx.seen, main); for (const key of new SafeArrayIterator(keys)) { // Ignore the `constructor` property and keys that exist on layers above. if ( key === "constructor" || - // deno-lint-ignore no-prototype-builtins - main.hasOwnProperty(key) || - (depth !== 0 && keySet.has(key)) + ObjectPrototypeHasOwnProperty(main, key) || + (depth !== 0 && SetPrototypeHas(keySet, key)) ) { continue; } @@ -1184,12 +1168,12 @@ function addPrototypeProperties( ); if (ctx.colors) { // Faint! - Array.prototype.push.call(output, `\u001b[2m${value}\u001b[22m`); + ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`); } else { - Array.prototype.push.call(output, value); + ArrayPrototypePush(output, value); } } - Array.prototype.pop.call(ctx.seen); + ArrayPrototypePop(ctx.seen); // Limit the inspection to up to three prototype layers. Using `recurseTimes` // is not a good choice here, because it's as if the properties are declared // on the current object from the users perspective. @@ -1218,7 +1202,7 @@ function getConstructorName(obj, ctx, recurseTimes, protoProps) { if ( protoProps !== undefined && (firstProto !== obj || - !builtInObjects.has(descriptor.value.name)) + !SetPrototypeHas(builtInObjects, descriptor.value.name)) ) { addPrototypeProperties( ctx, @@ -1273,7 +1257,7 @@ function formatPrimitive(fn, value, ctx) { let trailer = ""; if (value.length > ctx.maxStringLength) { const remaining = value.length - ctx.maxStringLength; - value = value.slice(0, ctx.maxStringLength); + value = StringPrototypeSlice(value, 0, ctx.maxStringLength); trailer = `... ${remaining} more character${remaining > 1 ? 
"s" : ""}`; } if ( @@ -1283,10 +1267,13 @@ function formatPrimitive(fn, value, ctx) { value.length > kMinLineLength && value.length > ctx.breakLength - ctx.indentationLvl - 4 ) { - return value - .split(formatPrimitiveRegExp) - .map((line) => fn(quoteString(line, ctx), "string")) - .join(` +\n${" ".repeat(ctx.indentationLvl + 2)}`) + trailer; + return ArrayPrototypeJoin( + ArrayPrototypeMap( + StringPrototypeSplit(value, formatPrimitiveRegExp), + (line) => fn(quoteString(line, ctx), "string"), + ), + ` +\n${StringPrototypeRepeat(" ", ctx.indentationLvl + 2)}`, + ) + trailer; } return fn(quoteString(value, ctx), "string") + trailer; } @@ -1328,14 +1315,19 @@ function formatArray(ctx, value, recurseTimes) { const output = []; for (let i = 0; i < len; i++) { // Special handle sparse arrays. - // deno-lint-ignore no-prototype-builtins - if (!value.hasOwnProperty(i)) { + if (!ObjectPrototypeHasOwnProperty(value, i)) { return formatSpecialArray(ctx, value, recurseTimes, len, output, i); } - output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, i, kArrayType), + ); } if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); } return output; } @@ -1393,10 +1385,13 @@ function formatSet(value, ctx, _ignored, recurseTimes) { const remaining = valLen - len; const output = []; for (let i = 0; i < len; i++) { - output.push(formatValue(ctx, values[i], recurseTimes)); + ArrayPrototypePush(output, formatValue(ctx, values[i], recurseTimes)); } if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, + ); } ctx.indentationLvl -= 2; @@ -1413,14 +1408,18 @@ function formatMap(value, ctx, _gnored, recurseTimes) { const remaining = valLen - len; const output = []; for (let i = 0; i < len; i++) { - output.push( + ArrayPrototypePush( + output, `${formatValue(ctx, values[i][0], recurseTimes)} => ${ formatValue(ctx, values[i][1], recurseTimes) }`, ); } if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); } ctx.indentationLvl -= 2; @@ -1460,7 +1459,7 @@ function formatTypedArray( ]) ) { const str = formatValue(ctx, value[key], recurseTimes, true); - Array.prototype.push.call(output, `[${key}]: ${str}`); + ArrayPrototypePush(output, `[${key}]: ${str}`); } ctx.indentationLvl -= 2; } @@ -1484,7 +1483,11 @@ function formatIterator(braces, ctx, value, recurseTimes) { const { 0: entries, 1: isKeyValue } = value; if (isKeyValue) { // Mark entry iterators as such. 
- braces[0] = braces[0].replace(iteratorRegExp, " Entries] {"); + braces[0] = StringPrototypeReplace( + braces[0], + iteratorRegExp, + " Entries] {", + ); return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); } @@ -1627,8 +1630,13 @@ function formatArrayBuffer(ctx, value) { } catch { return [ctx.stylize("(detached)", "special")]; } - let str = hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)) - .replace(arrayBufferRegExp, "$1 ").trim(); + let str = StringPrototypeTrim( + StringPrototypeReplace( + hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), + arrayBufferRegExp, + "$1 ", + ), + ); const remaining = buffer.length - ctx.maxArrayLength; if (remaining > 0) { @@ -1703,7 +1711,7 @@ function formatProperty( ctx.indentationLvl += diff; str = formatValue(ctx, desc.value, recurseTimes); if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { - extra = `\n${" ".repeat(ctx.indentationLvl)}`; + extra = `\n${StringPrototypeRepeat(" ", ctx.indentationLvl)}`; } ctx.indentationLvl -= diff; } else if (desc.get !== undefined) { @@ -1716,7 +1724,7 @@ function formatProperty( (ctx.getters === "set" && desc.set !== undefined)) ) { try { - const tmp = desc.get.call(original); + const tmp = FunctionPrototypeCall(desc.get, original); ctx.indentationLvl += 2; if (tmp === null) { str = `${s(`[${label}:`, sp)} ${s("null", "null")}${s("]", sp)}`; @@ -1747,7 +1755,11 @@ function formatProperty( } else if (key === "__proto__") { name = "['__proto__']"; } else if (desc.enumerable === false) { - const tmp = key.replace(strEscapeSequencesReplacer, escapeFn); + const tmp = StringPrototypeReplace( + key, + strEscapeSequencesReplacer, + escapeFn, + ); name = `[${tmp}]`; } else if (keyStrRegExp.test(key)) { @@ -1780,7 +1792,7 @@ function handleMaxCallStackSize( const colorRegExp = new SafeRegExp("\u001b\\[\\d\\d?m", "g"); function removeColors(str) { - return str.replace(colorRegExp, ""); + return StringPrototypeReplace(str, colorRegExp, 
""); } function isBelowBreakLength(ctx, output, start, base) { @@ -1836,10 +1848,10 @@ function formatNamespaceObject( // this aligned, even though this is a hacky way of dealing with this. const tmp = { [keys[i]]: "" }; output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); - const pos = output[i].lastIndexOf(" "); + const pos = StringPrototypeLastIndexOf(output[i], " "); // We have to find the last whitespace and have to replace that value as // it will be visualized as a regular string. - output[i] = output[i].slice(0, pos + 1) + + output[i] = StringPrototypeSlice(output[i], 0, pos + 1) + ctx.stylize("", "special"); } } @@ -1873,13 +1885,16 @@ function formatSpecialArray( const emptyItems = tmp - index; const ending = emptyItems > 1 ? "s" : ""; const message = `<${emptyItems} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); + ArrayPrototypePush(output, ctx.stylize(message, "undefined")); index = tmp; if (output.length === maxLength) { break; } } - output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); + ArrayPrototypePush( + output, + formatProperty(ctx, value, recurseTimes, key, kArrayType), + ); index++; } const remaining = value.length - index; @@ -1887,10 +1902,13 @@ function formatSpecialArray( if (remaining > 0) { const ending = remaining > 1 ? "s" : ""; const message = `<${remaining} empty item${ending}>`; - output.push(ctx.stylize(message, "undefined")); + ArrayPrototypePush(output, ctx.stylize(message, "undefined")); } } else if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? 
"s" : ""}`, + ); } return output; } @@ -1902,22 +1920,28 @@ function getBoxedBase( constructor, tag, ) { - let type; + let type, primitive; if (isNumberObject(value)) { type = "Number"; + primitive = NumberPrototypeValueOf(value); } else if (isStringObject(value)) { type = "String"; + primitive = StringPrototypeValueOf(value); // For boxed Strings, we have to remove the 0-n indexed entries, // since they just noisy up the output and are redundant // Make boxed primitive Strings look like such - keys.splice(0, value.length); + ArrayPrototypeSplice(keys, 0, value.length); } else if (isBooleanObject(value)) { type = "Boolean"; + primitive = BooleanPrototypeValueOf(value); } else if (isBigIntObject(value)) { type = "BigInt"; + primitive = BigIntPrototypeValueOf(value); } else { type = "Symbol"; + primitive = SymbolPrototypeValueOf(value); } + let base = `[${type}`; if (type !== constructor) { if (constructor === null) { @@ -1926,15 +1950,14 @@ function getBoxedBase( base += ` (${constructor})`; } } - - base += `: ${formatPrimitive(stylizeNoColor, value.valueOf(), ctx)}]`; + base += `: ${formatPrimitive(stylizeNoColor, primitive, ctx)}]`; if (tag !== "" && tag !== constructor) { base += ` [${tag}]`; } if (keys.length !== 0 || ctx.stylize === stylizeNoColor) { return base; } - return ctx.stylize(base, type.toLowerCase()); + return ctx.stylize(base, StringPrototypeToLowerCase(type)); } function reduceToSingleString( @@ -2140,7 +2163,7 @@ function formatMapIterInner( const len = entries.length / 2; const remaining = len - maxArrayLength; const maxLength = MathMin(maxArrayLength, len); - let output = new Array(maxLength); + const output = new Array(maxLength); let i = 0; ctx.indentationLvl += 2; if (state === kWeak) { @@ -2154,7 +2177,7 @@ function formatMapIterInner( // retrieved ones exist, we can not reliably return the same output) if the // output is not sorted anyway. 
if (!ctx.sorted) { - output = output.sort(); + ArrayPrototypeSort(output); } } else { for (; i < maxLength; i++) { @@ -2175,7 +2198,10 @@ function formatMapIterInner( } ctx.indentationLvl -= 2; if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? "s" : ""}`); + ArrayPrototypePush( + output, + `... ${remaining} more item${remaining > 1 ? "s" : ""}`, + ); } return output; } @@ -2198,11 +2224,11 @@ function formatSetIterInner( // Sort all entries to have a halfway reliable output (if more entries than // retrieved ones exist, we can not reliably return the same output) if the // output is not sorted anyway. - output.sort(); + ArrayPrototypeSort(output); } const remaining = entries.length - maxLength; if (remaining > 0) { - Array.prototype.push.call( + ArrayPrototypePush( output, `... ${remaining} more item${remaining > 1 ? "s" : ""}`, ); @@ -2229,9 +2255,9 @@ export function getStringWidth(str, removeControlChars = true) { if (removeControlChars) { str = stripVTControlCharacters(str); } - str = str.normalize("NFC"); + str = StringPrototypeNormalize(str, "NFC"); for (const char of new SafeStringIterator(str)) { - const code = char.codePointAt(0); + const code = StringPrototypeCodePointAt(char, 0); if (isFullWidthCodePoint(code)) { width += 2; } else if (!isZeroWidthCodePoint(code)) { @@ -2258,7 +2284,7 @@ const isZeroWidthCodePoint = (code) => { * Remove all VT control characters. Use to estimate displayed string width. 
*/ export function stripVTControlCharacters(str) { - return str.replace(ansi, ""); + return StringPrototypeReplace(str, ansi, ""); } function hasOwnProperty(obj, v) { @@ -2353,7 +2379,10 @@ function cliTable(head, columns) { (n, a) => MathMax(n, a.length), 0, ); - const columnRightAlign = new Array(columnWidths.length).fill(true); + const columnRightAlign = ArrayPrototypeFill( + new Array(columnWidths.length), + true, + ); for (let i = 0; i < head.length; i++) { const column = columns[i]; @@ -3349,7 +3378,7 @@ class Console { const values = []; let hasPrimitives = false; - keys.forEach((k, idx) => { + ArrayPrototypeForEach(keys, (k, idx) => { const value = resultData[k]; const primitive = value === null || (typeof value !== "function" && typeof value !== "object"); diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 2208124f6b..5be2e0c1c2 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -12,11 +12,12 @@ const primordials = globalThis.__bootstrap.primordials; import * as webidl from "ext:deno_webidl/00_webidl.js"; import DOMException from "ext:deno_web/01_dom_exception.js"; const { - ArrayBufferPrototype, - ArrayBufferPrototypeSlice, - ArrayBufferPrototypeGetByteLength, ArrayBufferIsView, + ArrayBufferPrototype, + ArrayBufferPrototypeGetByteLength, + ArrayBufferPrototypeSlice, ArrayPrototypeEvery, + ArrayPrototypeFilter, ArrayPrototypeFind, ArrayPrototypeIncludes, DataViewPrototypeGetBuffer, @@ -28,21 +29,21 @@ const { ObjectAssign, ObjectPrototypeHasOwnProperty, ObjectPrototypeIsPrototypeOf, - StringPrototypeToLowerCase, - StringPrototypeToUpperCase, - StringPrototypeCharCodeAt, - StringFromCharCode, SafeArrayIterator, SafeWeakMap, + StringFromCharCode, + StringPrototypeCharCodeAt, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, Symbol, SymbolFor, SyntaxError, - TypedArrayPrototypeSlice, + TypeError, TypedArrayPrototypeGetBuffer, TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetByteOffset, 
TypedArrayPrototypeGetSymbolToStringTag, - TypeError, + TypedArrayPrototypeSlice, Uint8Array, WeakMapPrototypeGet, WeakMapPrototypeSet, @@ -388,7 +389,10 @@ function constructKey(type, extractable, usages, algorithm, handle) { * @returns */ function usageIntersection(a, b) { - return a.filter((i) => b.includes(i)); + return ArrayPrototypeFilter( + a, + (i) => ArrayPrototypeIncludes(b, i), + ); } // TODO(lucacasonato): this should be moved to rust diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index d3132dc625..4c46ebe750 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -37,6 +37,7 @@ const { ObjectKeys, ObjectPrototypeIsPrototypeOf, RegExpPrototypeTest, + StringPrototypeStartsWith, Symbol, SymbolFor, TypeError, @@ -90,7 +91,11 @@ function processUrlList(urlList, urlListProcessed) { */ function newInnerRequest(method, url, headerList, body, maybeBlob) { let blobUrlEntry = null; - if (maybeBlob && typeof url === "string" && url.startsWith("blob:")) { + if ( + maybeBlob && + typeof url === "string" && + StringPrototypeStartsWith(url, "blob:") + ) { blobUrlEntry = blobFromObjectUrl(url); } return { diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index bddafb09ee..8766d32fff 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -7,12 +7,15 @@ const { ArrayPrototypeFilter, Date, DatePrototype, + DatePrototypeGetTime, Error, Function, MathTrunc, ObjectEntries, ObjectPrototypeIsPrototypeOf, ObjectValues, + StringPrototypeSlice, + StringPrototypeStartsWith, SymbolAsyncIterator, SymbolIterator, Uint32Array, @@ -232,8 +235,8 @@ function createByteStruct(types) { for (let i = 0; i < typeEntries.length; ++i) { let { 0: name, 1: type } = typeEntries[i]; - const optional = type.startsWith("?"); - if (optional) type = type.slice(1); + const optional = StringPrototypeStartsWith(type, "?"); + if (optional) type = StringPrototypeSlice(type, 1); if (type == "u64") { if (!optional) { @@ -369,7 +372,7 @@ async function link(oldpath, newpath) { 
function toUnixTimeFromEpoch(value) { if (ObjectPrototypeIsPrototypeOf(DatePrototype, value)) { - const time = value.valueOf(); + const time = DatePrototypeGetTime(value); const seconds = MathTrunc(time / 1e3); const nanoseconds = MathTrunc(time - (seconds * 1e3)) * 1e6; diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 33742e122c..6aed08bddb 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -37,14 +37,15 @@ import { import { TcpConn } from "ext:deno_net/01_net.js"; const { ObjectPrototypeIsPrototypeOf, + PromisePrototypeCatch, SafeSet, SafeSetIterator, SetPrototypeAdd, SetPrototypeDelete, Symbol, TypeError, - Uint8ArrayPrototype, Uint8Array, + Uint8ArrayPrototype, } = primordials; const { @@ -667,7 +668,7 @@ async function serve(arg1, arg2) { if (req === 0xffffffff) { break; } - callback(req).catch((error) => { + PromisePrototypeCatch(callback(req), (error) => { // Abnormal exit console.error( "Terminating Deno.serve loop due to unexpected error", diff --git a/ext/http/01_http.js b/ext/http/01_http.js index 0048eedebb..f41a2beed6 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -54,8 +54,9 @@ const { SetPrototypeDelete, StringPrototypeCharCodeAt, StringPrototypeIncludes, - StringPrototypeToLowerCase, StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, Symbol, SymbolAsyncIterator, TypeError, @@ -497,17 +498,20 @@ function buildCaseInsensitiveCommaValueFinder(checkText) { StringPrototypeToLowerCase(checkText), "", ), - (c) => [c.charCodeAt(0), c.toUpperCase().charCodeAt(0)], + (c) => [ + StringPrototypeCharCodeAt(c, 0), + StringPrototypeCharCodeAt(StringPrototypeToUpperCase(c), 0), + ], ); /** @type {number} */ let i; /** @type {number} */ let char; - /** @param value {string} */ + /** @param {string} value */ return function (value) { for (i = 0; i < value.length; i++) { - char = value.charCodeAt(i); + char = StringPrototypeCharCodeAt(value, i); skipWhitespace(value); if (hasWord(value)) { diff 
--git a/ext/net/01_net.js b/ext/net/01_net.js index 81e13f0945..e8ce3a3001 100644 --- a/ext/net/01_net.js +++ b/ext/net/01_net.js @@ -11,13 +11,16 @@ import { import * as abortSignal from "ext:deno_web/03_abort_signal.js"; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, + ArrayPrototypeForEach, + ArrayPrototypePush, Error, ObjectPrototypeIsPrototypeOf, PromiseResolve, SymbolAsyncIterator, SymbolFor, - TypedArrayPrototypeSubarray, TypeError, + TypedArrayPrototypeSubarray, Uint8Array, } = primordials; @@ -97,15 +100,16 @@ class Conn { const promise = core.read(this.rid, buffer); const promiseId = promise[promiseIdSymbol]; if (this.#unref) core.unrefOp(promiseId); - this.#pendingReadPromiseIds.push(promiseId); + ArrayPrototypePush(this.#pendingReadPromiseIds, promiseId); let nread; try { nread = await promise; } catch (e) { throw e; } finally { - this.#pendingReadPromiseIds = this.#pendingReadPromiseIds.filter((id) => - id !== promiseId + this.#pendingReadPromiseIds = ArrayPrototypeFilter( + this.#pendingReadPromiseIds, + (id) => id !== promiseId, ); } return nread === 0 ? 
null : nread; @@ -141,7 +145,7 @@ class Conn { if (this.#readable) { readableStreamForRidUnrefableRef(this.#readable); } - this.#pendingReadPromiseIds.forEach((id) => core.refOp(id)); + ArrayPrototypeForEach(this.#pendingReadPromiseIds, (id) => core.refOp(id)); } unref() { @@ -149,7 +153,10 @@ class Conn { if (this.#readable) { readableStreamForRidUnrefableUnref(this.#readable); } - this.#pendingReadPromiseIds.forEach((id) => core.unrefOp(id)); + ArrayPrototypeForEach( + this.#pendingReadPromiseIds, + (id) => core.unrefOp(id), + ); } } diff --git a/ext/url/00_url.js b/ext/url/00_url.js index 169cbe3a51..b4bc34b927 100644 --- a/ext/url/00_url.js +++ b/ext/url/00_url.js @@ -17,13 +17,14 @@ const { ArrayPrototypeSort, ArrayPrototypeSplice, ObjectKeys, - Uint32Array, SafeArrayIterator, StringPrototypeSlice, + StringPrototypeStartsWith, Symbol, SymbolFor, SymbolIterator, TypeError, + Uint32Array, } = primordials; const _list = Symbol("list"); @@ -421,7 +422,10 @@ class URL { #hasAuthority() { // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L824 - return this.#serialization.slice(this.#schemeEnd).startsWith("://"); + return StringPrototypeStartsWith( + StringPrototypeSlice(this.#serialization, this.#schemeEnd), + "://", + ); } /** @return {string} */ @@ -429,7 +433,7 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L263 return this.#fragmentStart - ? trim(this.#serialization.slice(this.#fragmentStart)) + ? 
trim(StringPrototypeSlice(this.#serialization, this.#fragmentStart)) : ""; } @@ -455,7 +459,11 @@ class URL { get host() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L101 - return this.#serialization.slice(this.#hostStart, this.#pathStart); + return StringPrototypeSlice( + this.#serialization, + this.#hostStart, + this.#pathStart, + ); } /** @param {string} value */ @@ -480,7 +488,11 @@ class URL { get hostname() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L988 - return this.#serialization.slice(this.#hostStart, this.#hostEnd); + return StringPrototypeSlice( + this.#serialization, + this.#hostStart, + this.#hostEnd, + ); } /** @param {string} value */ @@ -523,7 +535,11 @@ class URL { get origin() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/origin.rs#L14 - const scheme = this.#serialization.slice(0, this.#schemeEnd); + const scheme = StringPrototypeSlice( + this.#serialization, + 0, + this.#schemeEnd, + ); if ( scheme === "http" || scheme === "https" || scheme === "ftp" || scheme === "ws" || scheme === "wss" @@ -552,7 +568,8 @@ class URL { this.#usernameEnd !== this.#serialization.length && this.#serialization[this.#usernameEnd] === ":" ) { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#usernameEnd + 1, this.#hostStart - 1, ); @@ -583,11 +600,15 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/lib.rs#L1203 if (!this.#queryStart && !this.#fragmentStart) { - return this.#serialization.slice(this.#pathStart); + return StringPrototypeSlice(this.#serialization, this.#pathStart); } const nextComponentStart = this.#queryStart || 
this.#fragmentStart; - return this.#serialization.slice(this.#pathStart, nextComponentStart); + return StringPrototypeSlice( + this.#serialization, + this.#pathStart, + nextComponentStart, + ); } /** @param {string} value */ @@ -613,9 +634,14 @@ class URL { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L196 if (this.#port === NO_PORT) { - return this.#serialization.slice(this.#hostEnd, this.#pathStart); + return StringPrototypeSlice( + this.#serialization, + this.#hostEnd, + this.#pathStart, + ); } else { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#hostEnd + 1, /* : */ this.#pathStart, ); @@ -644,7 +670,11 @@ class URL { get protocol() { webidl.assertBranded(this, URLPrototype); // https://github.com/servo/rust-url/blob/1d307ae51a28fecc630ecec03380788bfb03a643/url/src/quirks.rs#L56 - return this.#serialization.slice(0, this.#schemeEnd + 1 /* : */); + return StringPrototypeSlice( + this.#serialization, + 0, + this.#schemeEnd + 1, /* : */ + ); } /** @param {string} value */ @@ -672,7 +702,9 @@ class URL { const afterPath = this.#queryStart || this.#fragmentStart || this.#serialization.length; const afterQuery = this.#fragmentStart || this.#serialization.length; - return trim(this.#serialization.slice(afterPath, afterQuery)); + return trim( + StringPrototypeSlice(this.#serialization, afterPath, afterQuery), + ); } /** @param {string} value */ @@ -703,7 +735,8 @@ class URL { this.#hasAuthority() && this.#usernameEnd > this.#schemeEnd + schemeSeperatorLen ) { - return this.#serialization.slice( + return StringPrototypeSlice( + this.#serialization, this.#schemeEnd + schemeSeperatorLen, this.#usernameEnd, ); diff --git a/ext/url/01_urlpattern.js b/ext/url/01_urlpattern.js index 3c08bc1b89..04bb50fd7f 100644 --- a/ext/url/01_urlpattern.js +++ b/ext/url/01_urlpattern.js @@ -13,8 +13,9 @@ import * as webidl from 
"ext:deno_webidl/00_webidl.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeMap, - ObjectKeys, + ArrayPrototypePop, ObjectFromEntries, + ObjectKeys, RegExpPrototypeExec, RegExpPrototypeTest, SafeRegExp, @@ -178,7 +179,7 @@ class URLPattern { const { 0: values, 1: inputs } = res; if (inputs[1] === null) { - inputs.pop(); + ArrayPrototypePop(inputs); } /** @type {URLPatternResult} */ diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index 6d390308d4..c0cbb30498 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -19,9 +19,10 @@ import { const primordials = globalThis.__bootstrap.primordials; const { ArrayBuffer, + ArrayBufferIsView, ArrayBufferPrototype, ArrayBufferPrototypeGetByteLength, - ArrayBufferIsView, + ArrayBufferPrototypeSlice, ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeShift, @@ -34,12 +35,12 @@ const { DataViewPrototypeGetByteOffset, Float32Array, Float64Array, - Int8Array, Int16Array, Int32Array, + Int8Array, + MathMin, NumberIsInteger, NumberIsNaN, - MathMin, ObjectCreate, ObjectDefineProperties, ObjectDefineProperty, @@ -52,14 +53,13 @@ const { PromisePrototypeThen, PromiseReject, PromiseResolve, - queueMicrotask, RangeError, ReflectHas, SafeFinalizationRegistry, SafePromiseAll, SafeWeakMap, // TODO(lucacasonato): add SharedArrayBuffer to primordials - // SharedArrayBufferPrototype + // SharedArrayBufferPrototype, Symbol, SymbolAsyncIterator, SymbolFor, @@ -70,13 +70,14 @@ const { TypedArrayPrototypeGetSymbolToStringTag, TypedArrayPrototypeSet, TypedArrayPrototypeSlice, - Uint8Array, Uint16Array, Uint32Array, + Uint8Array, Uint8ClampedArray, WeakMapPrototypeGet, WeakMapPrototypeHas, WeakMapPrototypeSet, + queueMicrotask, } = primordials; import { createFilteredInspectProxy } from "ext:deno_console/01_console.js"; import { assert, AssertionError } from "ext:deno_web/00_infra.js"; @@ -1252,7 +1253,16 @@ function readableByteStreamControllerEnqueueClonedChunkToQueue( ) { let cloneResult; 
try { - cloneResult = buffer.slice(byteOffset, byteOffset + byteLength); + if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, buffer)) { + cloneResult = ArrayBufferPrototypeSlice( + buffer, + byteOffset, + byteOffset + byteLength, + ); + } else { + // TODO(lucacasonato): add SharedArrayBuffer to primordials + cloneResult = buffer.slice(byteOffset, byteOffset + byteLength); + } } catch (e) { readableByteStreamControllerError(controller, e); } @@ -1864,7 +1874,7 @@ function readableByteStreamControllerPullInto( return; } } - controller[_pendingPullIntos].push(pullIntoDescriptor); + ArrayPrototypePush(controller[_pendingPullIntos], pullIntoDescriptor); readableStreamAddReadIntoRequest(stream, readIntoRequest); readableByteStreamControllerCallPullIfNeeded(controller); } @@ -4481,7 +4491,7 @@ function writableStreamMarkCloseRequestInFlight(stream) { function writableStreamMarkFirstWriteRequestInFlight(stream) { assert(stream[_inFlightWriteRequest] === undefined); assert(stream[_writeRequests].length); - const writeRequest = stream[_writeRequests].shift(); + const writeRequest = ArrayPrototypeShift(stream[_writeRequests]); stream[_inFlightWriteRequest] = writeRequest; } diff --git a/ext/web/13_message_port.js b/ext/web/13_message_port.js index fdc678a4f9..6d9a2c01f2 100644 --- a/ext/web/13_message_port.js +++ b/ext/web/13_message_port.js @@ -259,7 +259,7 @@ function serializeJsMessageData(data, transferables) { ); } j++; - transferredArrayBuffers.push(ab); + ArrayPrototypePush(transferredArrayBuffers, ab); } } diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index fef17b7018..06f4b50d96 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -17,6 +17,7 @@ const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeJoin, ArrayPrototypeMap, + DateNow, Error, ObjectPrototypeIsPrototypeOf, PromisePrototypeCatch, @@ -27,8 +28,8 @@ const { StringPrototypeToLowerCase, Symbol, 
SymbolFor, - TypedArrayPrototypeGetByteLength, TypeError, + TypedArrayPrototypeGetByteLength, Uint8ArrayPrototype, } = primordials; @@ -281,7 +282,7 @@ class WebSocketStream { this[_closed].state === "pending" ) { if ( - new Date().getTime() - await this[_closeSent].promise <= + DateNow() - await this[_closeSent].promise <= CLOSE_RESPONSE_TIMEOUT ) { return pull(controller); @@ -404,7 +405,7 @@ class WebSocketStream { core.opAsync("op_ws_close", this[_rid], code, closeInfo.reason), () => { setTimeout(() => { - this[_closeSent].resolve(new Date().getTime()); + this[_closeSent].resolve(DateNow()); }, 0); }, (err) => { diff --git a/runtime/js/11_workers.js b/runtime/js/11_workers.js index b08a5737e5..e046900053 100644 --- a/runtime/js/11_workers.js +++ b/runtime/js/11_workers.js @@ -4,10 +4,11 @@ const core = globalThis.Deno.core; const ops = core.ops; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, Error, ObjectPrototypeIsPrototypeOf, - StringPrototypeStartsWith, String, + StringPrototypeStartsWith, SymbolIterator, SymbolToStringTag, } = primordials; @@ -192,8 +193,9 @@ class Worker extends EventTarget { const event = new MessageEvent("message", { cancelable: false, data: message, - ports: transferables.filter((t) => - ObjectPrototypeIsPrototypeOf(MessagePortPrototype, t) + ports: ArrayPrototypeFilter( + transferables, + (t) => ObjectPrototypeIsPrototypeOf(MessagePortPrototype, t), ), }); this.dispatchEvent(event); diff --git a/runtime/js/30_os.js b/runtime/js/30_os.js index 3af5803ad6..a5a55e19bd 100644 --- a/runtime/js/30_os.js +++ b/runtime/js/30_os.js @@ -6,10 +6,12 @@ import { Event, EventTarget } from "ext:deno_web/02_event.js"; const primordials = globalThis.__bootstrap.primordials; const { Error, + FunctionPrototypeBind, SymbolFor, } = primordials; -const windowDispatchEvent = EventTarget.prototype.dispatchEvent.bind( +const windowDispatchEvent = FunctionPrototypeBind( + EventTarget.prototype.dispatchEvent, globalThis, 
); diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 01cf2973c8..854a0029ec 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -12,27 +12,28 @@ const ops = core.ops; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { + ArrayPrototypeFilter, ArrayPrototypeIndexOf, + ArrayPrototypeMap, ArrayPrototypePush, ArrayPrototypeShift, ArrayPrototypeSplice, - ArrayPrototypeMap, DateNow, Error, ErrorPrototype, - FunctionPrototypeCall, FunctionPrototypeBind, + FunctionPrototypeCall, ObjectAssign, - ObjectDefineProperty, ObjectDefineProperties, + ObjectDefineProperty, ObjectFreeze, ObjectPrototypeIsPrototypeOf, ObjectSetPrototypeOf, + PromisePrototypeThen, PromiseResolve, + SafeWeakMap, Symbol, SymbolIterator, - PromisePrototypeThen, - SafeWeakMap, TypeError, WeakMapPrototypeDelete, WeakMapPrototypeGet, @@ -147,8 +148,10 @@ async function pollForMessages() { const msgEvent = new event.MessageEvent("message", { cancelable: false, data: message, - ports: transferables.filter((t) => - ObjectPrototypeIsPrototypeOf(messagePort.MessagePortPrototype, t) + ports: ArrayPrototypeFilter( + transferables, + (t) => + ObjectPrototypeIsPrototypeOf(messagePort.MessagePortPrototype, t), ), }); From dcf391ffed3850f9026d88b146e156375c4619d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 1 May 2023 17:40:00 +0200 Subject: [PATCH 095/320] refactor: migrate async ops to generated wrappers (#18937) Migrates some of existing async ops to generated wrappers introduced in https://github.com/denoland/deno/pull/18887. As a result "core.opAsync2" was removed. I will follow up with more PRs that migrate all the async ops to generated wrappers. 
--- core/01_core.js | 26 --------- .../http_bench_json_ops.js | 32 +++++------ core/runtime.rs | 4 +- ext/fs/30_fs.js | 25 +++++++-- ext/http/00_serve.js | 2 +- ext/http/01_http.js | 6 ++- ext/node/polyfills/internal/crypto/random.ts | 19 +++++-- ext/web/02_timers.js | 6 ++- ext/websocket/01_websocket.js | 53 ++++++++++--------- ext/websocket/02_websocketstream.js | 35 +++++++----- 10 files changed, 109 insertions(+), 99 deletions(-) diff --git a/core/01_core.js b/core/01_core.js index 72cbe31f71..403a04297a 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -564,31 +564,6 @@ for (let i = 0; i < 10; i++) { return (ops[opName] = fn); } - function opAsync2(name, arg0, arg1) { - const id = nextPromiseId++; - try { - const maybeResult = asyncOps[name](id, arg0, arg1); - if (maybeResult !== undefined) { - movePromise(id); - return unwrapOpResultNewPromise(id, maybeResult, opAsync2); - } - } catch (err) { - movePromise(id); - if (!ReflectHas(asyncOps, name)) { - return PromiseReject(new TypeError(`${name} is not a registered op`)); - } - ErrorCaptureStackTrace(err, opAsync2); - return PromiseReject(err); - } - let promise = PromisePrototypeThen( - setPromise(id), - unwrapOpError(eventLoopTick), - ); - promise = handleOpCallTracing(name, id, promise); - promise[promiseIdSymbol] = id; - return promise; - } - function opAsync(name, ...args) { const id = nextPromiseId++; try { @@ -823,7 +798,6 @@ for (let i = 0; i < 10; i++) { asyncStub, generateAsyncOpHandler, opAsync, - opAsync2, resources, metrics, registerErrorBuilder, diff --git a/core/examples/http_bench_json_ops/http_bench_json_ops.js b/core/examples/http_bench_json_ops/http_bench_json_ops.js index 0c3b5be13e..beb6c90e45 100644 --- a/core/examples/http_bench_json_ops/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops/http_bench_json_ops.js @@ -3,7 +3,16 @@ // then write this fixed 'responseBuf'. The point of this benchmark is to // exercise the event loop in a simple yet semi-realistic way. 
-const { ops, opAsync, opAsync2 } = Deno.core; +// deno-lint-ignore-file camelcase + +const { op_listen } = Deno.core.ops; +const { + op_accept, + op_read_socket, +} = core.generateAsyncOpHandler( + "op_accept", + "op_read_socket", +); const requestBuf = new Uint8Array(64 * 1024); const responseBuf = new Uint8Array( @@ -12,24 +21,10 @@ const responseBuf = new Uint8Array( .map((c) => c.charCodeAt(0)), ); -/** Listens on 0.0.0.0:4570, returns rid. */ -function listen() { - return ops.op_listen(); -} - -/** Accepts a connection, returns rid. */ -function accept(serverRid) { - return opAsync("op_accept", serverRid); -} - -function read(serverRid, buf) { - return opAsync2("op_read_socket", serverRid, buf); -} - async function serve(rid) { try { while (true) { - await read(rid, requestBuf); + await op_read_socket(rid, requestBuf); if (!ops.op_try_write(rid, responseBuf)) { await Deno.core.writeAll(rid, responseBuf); } @@ -41,11 +36,12 @@ async function serve(rid) { } async function main() { - const listenerRid = listen(); + /** Listens on 0.0.0.0:4570, returns rid. */ + const listenerRid = op_listen(); Deno.core.print(`http_bench_ops listening on http://127.0.0.1:4570/\n`); while (true) { - const rid = await accept(listenerRid); + const rid = await op_accept(listenerRid); serve(rid); } } diff --git a/core/runtime.rs b/core/runtime.rs index e6c365e420..46256b8d8e 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -2765,9 +2765,9 @@ pub mod tests { .execute_script_static( "filename.js", r#" - + const { op_test } = Deno.core.generateAsyncOpHandler("op_test"); let zero_copy_a = new Uint8Array([0]); - Deno.core.opAsync2("op_test", null, zero_copy_a); + op_test(null, zero_copy_a); "#, ) .unwrap(); diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index 8766d32fff..70cfcee6ef 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -1,7 +1,22 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const ops = core.ops; +const { + op_chmod_async, + op_ftruncate_async, + op_truncate_async, + op_link_async, + op_flock_async, +} = Deno.core.generateAsyncOpHandler( + "op_chmod_async", + "op_ftruncate_async", + "op_truncate_async", + "op_link_async", + "op_flock_async", +); const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeFilter, @@ -34,7 +49,7 @@ function chmodSync(path, mode) { } async function chmod(path, mode) { - await core.opAsync2("op_chmod_async", pathFromURL(path), mode); + await op_chmod_async(pathFromURL(path), mode); } function chownSync( @@ -347,7 +362,7 @@ function ftruncateSync(rid, len) { } async function ftruncate(rid, len) { - await core.opAsync2("op_ftruncate_async", rid, coerceLen(len)); + await op_ftruncate_async(rid, coerceLen(len)); } function truncateSync(path, len) { @@ -355,7 +370,7 @@ function truncateSync(path, len) { } async function truncate(path, len) { - await core.opAsync2("op_truncate_async", path, coerceLen(len)); + await op_truncate_async(path, coerceLen(len)); } function umask(mask) { @@ -367,7 +382,7 @@ function linkSync(oldpath, newpath) { } async function link(oldpath, newpath) { - await core.opAsync2("op_link_async", oldpath, newpath); + await op_link_async(oldpath, newpath); } function toUnixTimeFromEpoch(value) { @@ -497,7 +512,7 @@ function flockSync(rid, exclusive) { } async function flock(rid, exclusive) { - await core.opAsync2("op_flock_async", rid, exclusive === true); + await op_flock_async(rid, exclusive === true); } function funlockSync(rid) { diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 6aed08bddb..b18c26e800 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -64,7 +64,7 @@ const { op_set_response_headers, op_upgrade_raw, op_ws_server_create, -} = Deno.core.generateAsyncOpHandler( +} = core.generateAsyncOpHandler( "op_http_wait", "op_upgrade", "op_get_request_headers", diff --git 
a/ext/http/01_http.js b/ext/http/01_http.js index f41a2beed6..92fd8e2858 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -1,8 +1,12 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { BadResourcePrototype, InterruptedPrototype, ops } = core; +const { op_http_write } = Deno.core.generateAsyncOpHandler("op_http_write"); import * as webidl from "ext:deno_webidl/00_webidl.js"; import { InnerBody } from "ext:deno_fetch/22_body.js"; import { Event, setEventTargetData } from "ext:deno_web/02_event.js"; @@ -321,7 +325,7 @@ function createRespondWith( break; } try { - await core.opAsync2("op_http_write", streamRid, value); + await op_http_write(streamRid, value); } catch (error) { const connError = httpConn[connErrorSymbol]; if ( diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts index 32256b13bf..4890e158ad 100644 --- a/ext/node/polyfills/internal/crypto/random.ts +++ b/ext/node/polyfills/internal/crypto/random.ts @@ -1,6 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase + import { notImplemented } from "ext:deno_node/_utils.ts"; import randomBytes from "ext:deno_node/internal/crypto/_randomBytes.ts"; import randomFill, { @@ -31,6 +33,15 @@ export { default as randomInt } from "ext:deno_node/internal/crypto/_randomInt.t const { core } = globalThis.__bootstrap; const { ops } = core; +const { + op_node_gen_prime_async, + op_node_check_prime_bytes_async, + op_node_check_prime_async, +} = Deno.core.generateAsyncOpHandler( + "op_node_gen_prime_async", + "op_node_check_prime_bytes_async", + "op_node_check_prime_async", +); export type LargeNumberLike = | ArrayBufferView @@ -79,9 +90,9 @@ export function checkPrime( validateInt32(checks, "options.checks", 0); - let op = "op_node_check_prime_bytes_async"; + let op = op_node_check_prime_bytes_async; if (typeof candidate === "bigint") { - op = "op_node_check_prime_async"; + op = op_node_check_prime_async; } else if (!isAnyArrayBuffer(candidate) && !isArrayBufferView(candidate)) { throw new ERR_INVALID_ARG_TYPE( "candidate", @@ -96,7 +107,7 @@ export function checkPrime( ); } - core.opAsync2(op, candidate, checks).then( + op(candidate, checks).then( (result) => { callback?.(null, result); }, @@ -160,7 +171,7 @@ export function generatePrime( const { bigint, } = validateRandomPrimeJob(size, options); - core.opAsync2("op_node_gen_prime_async", size).then((prime: Uint8Array) => + op_node_gen_prime_async(size).then((prime: Uint8Array) => bigint ? arrayBufferToUnsignedBigInt(prime.buffer) : prime.buffer ).then((prime: ArrayBuffer | bigint) => { callback?.(null, prime); diff --git a/ext/web/02_timers.js b/ext/web/02_timers.js index 78cf06e445..cfd85a0553 100644 --- a/ext/web/02_timers.js +++ b/ext/web/02_timers.js @@ -1,5 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase + const core = globalThis.Deno.core; const ops = core.ops; const primordials = globalThis.__bootstrap.primordials; @@ -13,7 +15,6 @@ const { MapPrototypeSet, Uint8Array, Uint32Array, - // deno-lint-ignore camelcase NumberPOSITIVE_INFINITY, PromisePrototypeThen, SafeArrayIterator, @@ -26,6 +27,7 @@ const { import * as webidl from "ext:deno_webidl/00_webidl.js"; import { reportException } from "ext:deno_web/02_event.js"; import { assert } from "ext:deno_web/00_infra.js"; +const { op_sleep } = core.generateAsyncOpHandler("op_sleep"); const hrU8 = new Uint8Array(8); const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8)); @@ -216,7 +218,7 @@ const scheduledTimers = { head: null, tail: null }; */ function runAfterTimeout(cb, millis, timerInfo) { const cancelRid = timerInfo.cancelRid; - const sleepPromise = core.opAsync2("op_sleep", millis, cancelRid); + const sleepPromise = op_sleep(millis, cancelRid); timerInfo.promiseId = sleepPromise[SymbolFor("Deno.core.internalPromiseId")]; if (!timerInfo.isRef) { core.unrefOp(timerInfo.promiseId); diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index dab34a0236..f7dd516ff0 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -1,12 +1,9 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase /// const core = globalThis.Deno.core; -const { opAsync, opAsync2 } = core; -// deno-lint-ignore camelcase -const op_ws_check_permission_and_cancel_handle = - core.ops.op_ws_check_permission_and_cancel_handle; import { URL } from "ext:deno_url/00_url.js"; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { HTTP_TOKEN_CODE_POINT_RE } from "ext:deno_web/00_infra.js"; @@ -51,6 +48,23 @@ const { TypedArrayPrototypeGetByteLength, TypedArrayPrototypeGetSymbolToStringTag, } = primordials; +const op_ws_check_permission_and_cancel_handle = + core.ops.op_ws_check_permission_and_cancel_handle; +const { + op_ws_create, + op_ws_close, + op_ws_send_binary, + op_ws_send_text, + op_ws_next_event, + op_ws_send_ping, +} = core.generateAsyncOpHandler( + "op_ws_create", + "op_ws_close", + "op_ws_send_binary", + "op_ws_send_text", + "op_ws_next_event", + "op_ws_send_ping", +); webidl.converters["sequence or DOMString"] = ( V, @@ -252,8 +266,7 @@ class WebSocket extends EventTarget { } PromisePrototypeThen( - opAsync( - "op_ws_create", + op_ws_create( "new WebSocket()", wsURL.href, ArrayPrototypeJoin(protocols, ", "), @@ -265,7 +278,7 @@ class WebSocket extends EventTarget { if (this[_readyState] === CLOSING) { PromisePrototypeThen( - opAsync("op_ws_close", this[_rid]), + op_ws_close(this[_rid]), () => { this[_readyState] = CLOSED; @@ -318,8 +331,7 @@ class WebSocket extends EventTarget { const sendTypedArray = (view, byteLength) => { this[_bufferedAmount] += byteLength; PromisePrototypeThen( - opAsync2( - "op_ws_send_binary", + op_ws_send_binary( this[_rid], view, ), @@ -353,8 +365,7 @@ class WebSocket extends EventTarget { const d = core.encode(string); this[_bufferedAmount] += TypedArrayPrototypeGetByteLength(d); PromisePrototypeThen( - opAsync2( - "op_ws_send_text", + op_ws_send_text( this[_rid], string, ), @@ -407,8 +418,7 @@ class WebSocket extends EventTarget { this[_readyState] = CLOSING; PromisePrototypeCatch( - opAsync( - 
"op_ws_close", + op_ws_close( this[_rid], code, reason, @@ -432,10 +442,7 @@ class WebSocket extends EventTarget { async [_eventLoop]() { while (this[_readyState] !== CLOSED) { - const { 0: kind, 1: value } = await opAsync2( - "op_ws_next_event", - this[_rid], - ); + const { 0: kind, 1: value } = await op_ws_next_event(this[_rid]); switch (kind) { case 0: { @@ -495,8 +502,7 @@ class WebSocket extends EventTarget { if (prevState === OPEN) { try { - await opAsync( - "op_ws_close", + await op_ws_close( this[_rid], code, value, @@ -524,17 +530,12 @@ class WebSocket extends EventTarget { clearTimeout(this[_idleTimeoutTimeout]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { - await opAsync("op_ws_send_ping", this[_rid]); + await op_ws_send_ping(this[_rid]); this[_idleTimeoutTimeout] = setTimeout(async () => { if (this[_readyState] === OPEN) { this[_readyState] = CLOSING; const reason = "No response from ping frame."; - await opAsync( - "op_ws_close", - this[_rid], - 1001, - reason, - ); + await op_ws_close(this[_rid], 1001, reason); this[_readyState] = CLOSED; const errEvent = new ErrorEvent("error", { diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 06f4b50d96..2c5df262ac 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+// deno-lint-ignore-file camelcase /// const core = globalThis.Deno.core; @@ -32,6 +33,19 @@ const { TypedArrayPrototypeGetByteLength, Uint8ArrayPrototype, } = primordials; +const { + op_ws_send_text, + op_ws_send_binary, + op_ws_next_event, + op_ws_create, + op_ws_close, +} = core.generateAsyncOpHandler( + "op_ws_send_text", + "op_ws_send_binary", + "op_ws_next_event", + "op_ws_create", + "op_ws_close", +); webidl.converters.WebSocketStreamOptions = webidl.createDictionaryConverter( "WebSocketStreamOptions", @@ -153,8 +167,7 @@ class WebSocketStream { }; options.signal?.[add](abort); PromisePrototypeThen( - core.opAsync( - "op_ws_create", + op_ws_create( "new WebSocketStream()", this[_url], options.protocols ? ArrayPrototypeJoin(options.protocols, ", ") : "", @@ -165,15 +178,12 @@ class WebSocketStream { options.signal?.[remove](abort); if (this[_earlyClose]) { PromisePrototypeThen( - core.opAsync("op_ws_close", create.rid), + op_ws_close(create.rid), () => { PromisePrototypeThen( (async () => { while (true) { - const { 0: kind } = await core.opAsync( - "op_ws_next_event", - create.rid, - ); + const { 0: kind } = await op_ws_next_event(create.rid); if (kind > 5) { /* close */ @@ -206,11 +216,11 @@ class WebSocketStream { const writable = new WritableStream({ write: async (chunk) => { if (typeof chunk === "string") { - await core.opAsync2("op_ws_send_text", this[_rid], chunk); + await op_ws_send_text(this[_rid], chunk); } else if ( ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, chunk) ) { - await core.opAsync2("op_ws_send_binary", this[_rid], chunk); + await op_ws_send_binary(this[_rid], chunk); } else { throw new TypeError( "A chunk may only be either a string or an Uint8Array", @@ -235,10 +245,7 @@ class WebSocketStream { }, }); const pull = async (controller) => { - const { 0: kind, 1: value } = await core.opAsync2( - "op_ws_next_event", - this[_rid], - ); + const { 0: kind, 1: value } = await op_ws_next_event(this[_rid]); switch (kind) { case 0: @@ -402,7 
+409,7 @@ class WebSocketStream { this[_earlyClose] = true; } else if (this[_closed].state === "pending") { PromisePrototypeThen( - core.opAsync("op_ws_close", this[_rid], code, closeInfo.reason), + op_ws_close(this[_rid], code, closeInfo.reason), () => { setTimeout(() => { this[_closeSent].resolve(DateNow()); From 30628288ce2b411ca3def46129a4606073e16bac Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 1 May 2023 14:21:27 -0400 Subject: [PATCH 096/320] perf: lazily retrieve ppid (#18940) This is very apparent on Windows. Before: 45.74ms (Hello world) After: 33.92ms Closes #18939 --- cli/tsc/diagnostics.rs | 1 - runtime/js/99_main.js | 31 ++++++++++++++++++------------- runtime/ops/runtime.rs | 7 +++++-- runtime/worker_bootstrap.rs | 18 ++++++------------ 4 files changed, 29 insertions(+), 28 deletions(-) diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index 1e9819309e..15aadff814 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -26,7 +26,6 @@ const UNSTABLE_DENO_PROPS: &[&str] = &[ "listen", "listenDatagram", "dlopen", - "ppid", "removeSignalListener", "shutdown", "umask", diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js index 854a0029ec..f0c63df744 100644 --- a/runtime/js/99_main.js +++ b/runtime/js/99_main.js @@ -427,12 +427,11 @@ function bootstrapMainRuntime(runtimeOptions) { 8: tsVersion, 9: unstableFlag, 10: pid, - 11: ppid, - 12: target, - 13: v8Version, - 14: userAgent, - 15: inspectFlag, - // 16: enableTestingFeaturesFlag + 11: target, + 12: v8Version, + 13: userAgent, + 14: inspectFlag, + // 15: enableTestingFeaturesFlag } = runtimeOptions; performance.setTimeOrigin(DateNow()); @@ -495,9 +494,16 @@ function bootstrapMainRuntime(runtimeOptions) { setUserAgent(userAgent); setLanguage(locale); + let ppid = undefined; ObjectDefineProperties(finalDenoNs, { pid: util.readOnly(pid), - ppid: util.readOnly(ppid), + ppid: util.getterOnly(() => { + // lazy because it's expensive + if (ppid === undefined) { + ppid = 
ops.op_ppid(); + } + return ppid; + }), noColor: util.readOnly(noColor), args: util.readOnly(ObjectFreeze(args)), mainModule: util.getterOnly(opMainModule), @@ -535,12 +541,11 @@ function bootstrapWorkerRuntime( 8: tsVersion, 9: unstableFlag, 10: pid, - // 11: ppid, - 12: target, - 13: v8Version, - // 14: userAgent, - // 15: inspectFlag, - 16: enableTestingFeaturesFlag, + 11: target, + 12: v8Version, + // 13: userAgent, + // 14: inspectFlag, + 15: enableTestingFeaturesFlag, } = runtimeOptions; performance.setTimeOrigin(DateNow()); diff --git a/runtime/ops/runtime.rs b/runtime/ops/runtime.rs index 8802f9cd6c..9f2e48d7aa 100644 --- a/runtime/ops/runtime.rs +++ b/runtime/ops/runtime.rs @@ -8,7 +8,7 @@ use deno_core::OpState; deno_core::extension!( deno_runtime, - ops = [op_main_module], + ops = [op_main_module, op_ppid], options = { main_module: ModuleSpecifier }, state = |state, options| { state.put::(options.main_module); @@ -31,7 +31,10 @@ fn op_main_module(state: &mut OpState) -> Result { Ok(main_path) } -pub fn ppid() -> i64 { +/// This is an op instead of being done at initialization time because +/// it's expensive to retrieve the ppid on Windows.
+#[op] +pub fn op_ppid() -> i64 { #[cfg(windows)] { // Adopted from rustup: diff --git a/runtime/worker_bootstrap.rs b/runtime/worker_bootstrap.rs index 09725122cf..ba894f52b3 100644 --- a/runtime/worker_bootstrap.rs +++ b/runtime/worker_bootstrap.rs @@ -5,7 +5,6 @@ use deno_core::ModuleSpecifier; use std::thread; use crate::colors; -use crate::ops::runtime::ppid; /// Common bootstrap options for MainWorker & WebWorker #[derive(Clone)] @@ -61,7 +60,7 @@ impl BootstrapOptions { &self, scope: &mut v8::HandleScope<'s>, ) -> v8::Local<'s, v8::Array> { - let array = v8::Array::new(scope, 17); + let array = v8::Array::new(scope, 16); { let args = v8::Array::new(scope, self.args.len() as i32); @@ -142,18 +141,13 @@ impl BootstrapOptions { array.set_index(scope, 10, val.into()); } - { - let val = v8::Integer::new(scope, ppid() as i32); - array.set_index(scope, 11, val.into()); - } - { let val = v8::String::new_external_onebyte_static( scope, env!("TARGET").as_bytes(), ) .unwrap(); - array.set_index(scope, 12, val.into()); + array.set_index(scope, 11, val.into()); } { @@ -163,7 +157,7 @@ impl BootstrapOptions { v8::NewStringType::Normal, ) .unwrap(); - array.set_index(scope, 13, val.into()); + array.set_index(scope, 12, val.into()); } { @@ -173,17 +167,17 @@ impl BootstrapOptions { v8::NewStringType::Normal, ) .unwrap(); - array.set_index(scope, 14, val.into()); + array.set_index(scope, 13, val.into()); } { let val = v8::Boolean::new(scope, self.inspect); - array.set_index(scope, 15, val.into()); + array.set_index(scope, 14, val.into()); } { let val = v8::Boolean::new(scope, self.enable_testing_features); - array.set_index(scope, 16, val.into()); + array.set_index(scope, 15, val.into()); } array From 9efed4c7a3d32de62e9c9b5e0c6712ce97637abb Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 1 May 2023 14:35:23 -0400 Subject: [PATCH 097/320] refactor(cli): remove ProcState - add CliFactory (#18900) This removes `ProcState` and replaces it with a new `CliFactory` which 
initializes our "service structs" on demand. This isn't a performance improvement at the moment for `deno run`, but might unlock performance improvements in the future. --- cli/args/mod.rs | 4 +- cli/cache/caches.rs | 33 +- cli/factory.rs | 669 ++++++++++++++++++ cli/lsp/language_server.rs | 11 +- cli/lsp/testing/execution.rs | 40 +- cli/main.rs | 19 +- cli/module_loader.rs | 21 +- cli/proc_state.rs | 447 ------------ cli/standalone/binary.rs | 19 +- .../package_json/invalid_value/task.out | 2 +- .../task/both/package_json_selected.out | 2 +- cli/tests/testdata/task/npx/non_existent.out | 2 +- cli/tests/testdata/task/npx/on_own.out | 2 +- cli/tests/testdata/task/package_json/bin.out | 2 +- cli/tools/bench.rs | 47 +- cli/tools/bundle.rs | 95 +-- cli/tools/check.rs | 7 +- cli/tools/coverage/mod.rs | 17 +- cli/tools/doc.rs | 25 +- cli/tools/fmt.rs | 9 +- cli/tools/info.rs | 42 +- cli/tools/installer.rs | 7 +- cli/tools/lint.rs | 9 +- cli/tools/repl/mod.rs | 24 +- cli/tools/run.rs | 70 +- cli/tools/standalone.rs | 30 +- cli/tools/task.rs | 32 +- cli/tools/test.rs | 54 +- cli/tools/upgrade.rs | 7 +- cli/tools/vendor/mod.rs | 17 +- cli/watcher.rs | 99 +++ cli/worker.rs | 1 + 32 files changed, 1140 insertions(+), 725 deletions(-) create mode 100644 cli/factory.rs delete mode 100644 cli/proc_state.rs create mode 100644 cli/watcher.rs diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 4038fb0998..00476dce1c 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -753,7 +753,7 @@ impl CliOptions { return Ok(Some(state.snapshot.clone().into_valid()?)); } - if let Some(lockfile) = self.maybe_lock_file() { + if let Some(lockfile) = self.maybe_lockfile() { if !lockfile.lock().overwrite { return Ok(Some( snapshot_from_lockfile(lockfile.clone(), api) @@ -827,7 +827,7 @@ impl CliOptions { .map(|host| InspectorServer::new(host, version::get_user_agent())) } - pub fn maybe_lock_file(&self) -> Option>> { + pub fn maybe_lockfile(&self) -> Option>> { self.maybe_lockfile.clone() } diff 
--git a/cli/cache/caches.rs b/cli/cache/caches.rs index 0b60d0bece..62bec8a000 100644 --- a/cli/cache/caches.rs +++ b/cli/cache/caches.rs @@ -12,8 +12,8 @@ use super::node::NODE_ANALYSIS_CACHE_DB; use super::parsed_source::PARSED_SOURCE_CACHE_DB; use super::DenoDir; -#[derive(Default)] pub struct Caches { + dir: DenoDir, fmt_incremental_cache_db: OnceCell, lint_incremental_cache_db: OnceCell, dep_analysis_db: OnceCell, @@ -22,6 +22,17 @@ pub struct Caches { } impl Caches { + pub fn new(dir: DenoDir) -> Self { + Self { + dir, + fmt_incremental_cache_db: Default::default(), + lint_incremental_cache_db: Default::default(), + dep_analysis_db: Default::default(), + node_analysis_db: Default::default(), + type_checking_cache_db: Default::default(), + } + } + fn make_db( cell: &OnceCell, config: &'static CacheDBConfiguration, @@ -32,43 +43,43 @@ impl Caches { .clone() } - pub fn fmt_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB { + pub fn fmt_incremental_cache_db(&self) -> CacheDB { Self::make_db( &self.fmt_incremental_cache_db, &INCREMENTAL_CACHE_DB, - dir.fmt_incremental_cache_db_file_path(), + self.dir.fmt_incremental_cache_db_file_path(), ) } - pub fn lint_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB { + pub fn lint_incremental_cache_db(&self) -> CacheDB { Self::make_db( &self.lint_incremental_cache_db, &INCREMENTAL_CACHE_DB, - dir.lint_incremental_cache_db_file_path(), + self.dir.lint_incremental_cache_db_file_path(), ) } - pub fn dep_analysis_db(&self, dir: &DenoDir) -> CacheDB { + pub fn dep_analysis_db(&self) -> CacheDB { Self::make_db( &self.dep_analysis_db, &PARSED_SOURCE_CACHE_DB, - dir.dep_analysis_db_file_path(), + self.dir.dep_analysis_db_file_path(), ) } - pub fn node_analysis_db(&self, dir: &DenoDir) -> CacheDB { + pub fn node_analysis_db(&self) -> CacheDB { Self::make_db( &self.node_analysis_db, &NODE_ANALYSIS_CACHE_DB, - dir.node_analysis_db_file_path(), + self.dir.node_analysis_db_file_path(), ) } - pub fn type_checking_cache_db(&self, 
dir: &DenoDir) -> CacheDB { + pub fn type_checking_cache_db(&self) -> CacheDB { Self::make_db( &self.type_checking_cache_db, &TYPE_CHECK_CACHE_DB, - dir.type_checking_cache_db_file_path(), + self.dir.type_checking_cache_db_file_path(), ) } } diff --git a/cli/factory.rs b/cli/factory.rs new file mode 100644 index 0000000000..69560cf544 --- /dev/null +++ b/cli/factory.rs @@ -0,0 +1,669 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::args::CliOptions; +use crate::args::DenoSubcommand; +use crate::args::Flags; +use crate::args::Lockfile; +use crate::args::StorageKeyResolver; +use crate::args::TsConfigType; +use crate::cache::Caches; +use crate::cache::DenoDir; +use crate::cache::EmitCache; +use crate::cache::HttpCache; +use crate::cache::NodeAnalysisCache; +use crate::cache::ParsedSourceCache; +use crate::emit::Emitter; +use crate::file_fetcher::FileFetcher; +use crate::graph_util::ModuleGraphBuilder; +use crate::graph_util::ModuleGraphContainer; +use crate::http_util::HttpClient; +use crate::module_loader::CjsResolutionStore; +use crate::module_loader::CliModuleLoaderFactory; +use crate::module_loader::ModuleLoadPreparer; +use crate::module_loader::NpmModuleLoader; +use crate::node::CliCjsEsmCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; +use crate::npm::create_npm_fs_resolver; +use crate::npm::CliNpmRegistryApi; +use crate::npm::CliNpmResolver; +use crate::npm::NpmCache; +use crate::npm::NpmResolution; +use crate::npm::PackageJsonDepsInstaller; +use crate::resolver::CliGraphResolver; +use crate::tools::check::TypeChecker; +use crate::util::progress_bar::ProgressBar; +use crate::util::progress_bar::ProgressBarStyle; +use crate::watcher::FileWatcher; +use crate::watcher::FileWatcherReporter; +use crate::worker::CliMainWorkerFactory; +use crate::worker::CliMainWorkerOptions; +use crate::worker::HasNodeSpecifierChecker; + +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; + +use 
deno_runtime::deno_node; +use deno_runtime::deno_node::analyze::NodeCodeTranslator; +use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_web::BlobStore; +use deno_runtime::inspector_server::InspectorServer; +use deno_semver::npm::NpmPackageReqReference; +use import_map::ImportMap; +use log::warn; +use std::cell::RefCell; +use std::future::Future; +use std::path::PathBuf; +use std::sync::Arc; + +pub struct CliFactoryBuilder { + maybe_sender: Option>>, +} + +impl CliFactoryBuilder { + pub fn new() -> Self { + Self { maybe_sender: None } + } + + pub fn with_watcher( + mut self, + sender: tokio::sync::mpsc::UnboundedSender>, + ) -> Self { + self.maybe_sender = Some(sender); + self + } + + pub async fn build_from_flags( + self, + flags: Flags, + ) -> Result { + Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?))) + } + + pub fn build_from_cli_options(self, options: Arc) -> CliFactory { + CliFactory { + maybe_sender: RefCell::new(self.maybe_sender), + options, + services: Default::default(), + } + } +} + +struct Deferred(once_cell::unsync::OnceCell); + +impl Default for Deferred { + fn default() -> Self { + Self(once_cell::unsync::OnceCell::default()) + } +} + +impl Deferred { + pub fn get_or_try_init( + &self, + create: impl FnOnce() -> Result, + ) -> Result<&T, AnyError> { + self.0.get_or_try_init(create) + } + + pub fn get_or_init(&self, create: impl FnOnce() -> T) -> &T { + self.0.get_or_init(create) + } + + pub async fn get_or_try_init_async( + &self, + create: impl Future>, + ) -> Result<&T, AnyError> { + if self.0.get().is_none() { + // todo(dsherret): it would be more ideal if this enforced a + // single executor and then we could make some initialization + // concurrent + let val = create.await?; + _ = self.0.set(val); + } + Ok(self.0.get().unwrap()) + } +} + +#[derive(Default)] +struct CliFactoryServices { + dir: Deferred, + caches: Deferred>, + file_fetcher: Deferred>, + 
http_client: Deferred, + emit_cache: Deferred, + emitter: Deferred>, + graph_container: Deferred>, + lockfile: Deferred>>>, + maybe_import_map: Deferred>>, + maybe_inspector_server: Deferred>>, + root_cert_store: Deferred, + blob_store: Deferred, + parsed_source_cache: Deferred>, + resolver: Deferred>, + file_watcher: Deferred>, + maybe_file_watcher_reporter: Deferred>, + module_graph_builder: Deferred>, + module_load_preparer: Deferred>, + node_code_translator: Deferred>, + node_fs: Deferred>, + node_resolver: Deferred>, + npm_api: Deferred>, + npm_cache: Deferred>, + npm_resolver: Deferred>, + npm_resolution: Deferred>, + package_json_deps_installer: Deferred>, + text_only_progress_bar: Deferred, + type_checker: Deferred>, + cjs_resolutions: Deferred>, +} + +pub struct CliFactory { + maybe_sender: + RefCell>>>, + options: Arc, + services: CliFactoryServices, +} + +impl CliFactory { + pub async fn from_flags(flags: Flags) -> Result { + CliFactoryBuilder::new().build_from_flags(flags).await + } + + pub fn from_cli_options(options: Arc) -> Self { + CliFactoryBuilder::new().build_from_cli_options(options) + } + + pub fn cli_options(&self) -> &Arc { + &self.options + } + + pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> { + self + .services + .dir + .get_or_try_init(|| self.options.resolve_deno_dir()) + } + + pub fn caches(&self) -> Result<&Arc, AnyError> { + self.services.caches.get_or_try_init(|| { + let caches = Arc::new(Caches::new(self.deno_dir()?.clone())); + // Warm up the caches we know we'll likely need based on the CLI mode + match self.options.sub_command() { + DenoSubcommand::Run(_) => { + _ = caches.dep_analysis_db(); + _ = caches.node_analysis_db(); + } + DenoSubcommand::Check(_) => { + _ = caches.dep_analysis_db(); + _ = caches.node_analysis_db(); + _ = caches.type_checking_cache_db(); + } + _ => {} + } + Ok(caches) + }) + } + + pub fn blob_store(&self) -> &BlobStore { + self.services.blob_store.get_or_init(BlobStore::default) + } + + pub fn 
root_cert_store(&self) -> Result<&RootCertStore, AnyError> { + self + .services + .root_cert_store + .get_or_try_init(|| self.options.resolve_root_cert_store()) + } + + pub fn text_only_progress_bar(&self) -> &ProgressBar { + self + .services + .text_only_progress_bar + .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) + } + + pub fn http_client(&self) -> Result<&HttpClient, AnyError> { + self.services.http_client.get_or_try_init(|| { + HttpClient::new( + Some(self.root_cert_store()?.clone()), + self.options.unsafely_ignore_certificate_errors().clone(), + ) + }) + } + + pub fn file_fetcher(&self) -> Result<&Arc, AnyError> { + self.services.file_fetcher.get_or_try_init(|| { + Ok(Arc::new(FileFetcher::new( + HttpCache::new(&self.deno_dir()?.deps_folder_path()), + self.options.cache_setting(), + !self.options.no_remote(), + self.http_client()?.clone(), + self.blob_store().clone(), + Some(self.text_only_progress_bar().clone()), + ))) + }) + } + + pub fn maybe_lockfile(&self) -> &Option>> { + self + .services + .lockfile + .get_or_init(|| self.options.maybe_lockfile()) + } + + pub fn npm_cache(&self) -> Result<&Arc, AnyError> { + self.services.npm_cache.get_or_try_init(|| { + Ok(Arc::new(NpmCache::new( + self.deno_dir()?.npm_folder_path(), + self.options.cache_setting(), + self.http_client()?.clone(), + self.text_only_progress_bar().clone(), + ))) + }) + } + + pub fn npm_api(&self) -> Result<&Arc, AnyError> { + self.services.npm_api.get_or_try_init(|| { + Ok(Arc::new(CliNpmRegistryApi::new( + CliNpmRegistryApi::default_url().to_owned(), + self.npm_cache()?.clone(), + self.http_client()?.clone(), + self.text_only_progress_bar().clone(), + ))) + }) + } + + pub async fn npm_resolution(&self) -> Result<&Arc, AnyError> { + self + .services + .npm_resolution + .get_or_try_init_async(async { + let npm_api = self.npm_api()?; + Ok(Arc::new(NpmResolution::from_serialized( + npm_api.clone(), + self + .options + .resolve_npm_resolution_snapshot(npm_api) + .await?, + 
self.maybe_lockfile().as_ref().cloned(), + ))) + }) + .await + } + + pub fn node_fs(&self) -> &Arc { + self + .services + .node_fs + .get_or_init(|| Arc::new(deno_node::RealFs)) + } + + pub async fn npm_resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .npm_resolver + .get_or_try_init_async(async { + let npm_resolution = self.npm_resolution().await?; + let npm_fs_resolver = create_npm_fs_resolver( + self.node_fs().clone(), + self.npm_cache()?.clone(), + self.text_only_progress_bar(), + CliNpmRegistryApi::default_url().to_owned(), + npm_resolution.clone(), + self.options.node_modules_dir_path(), + ); + Ok(Arc::new(CliNpmResolver::new( + npm_resolution.clone(), + npm_fs_resolver, + self.maybe_lockfile().as_ref().cloned(), + ))) + }) + .await + } + + pub async fn package_json_deps_installer( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .package_json_deps_installer + .get_or_try_init_async(async { + let npm_api = self.npm_api()?; + let npm_resolution = self.npm_resolution().await?; + Ok(Arc::new(PackageJsonDepsInstaller::new( + npm_api.clone(), + npm_resolution.clone(), + self.options.maybe_package_json_deps(), + ))) + }) + .await + } + + pub async fn maybe_import_map( + &self, + ) -> Result<&Option>, AnyError> { + self + .services + .maybe_import_map + .get_or_try_init_async(async { + Ok( + self + .options + .resolve_import_map(self.file_fetcher()?) + .await? 
+ .map(Arc::new), + ) + }) + .await + } + + pub async fn resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .resolver + .get_or_try_init_async(async { + Ok(Arc::new(CliGraphResolver::new( + self.options.to_maybe_jsx_import_source_config(), + self.maybe_import_map().await?.clone(), + self.options.no_npm(), + self.npm_api()?.clone(), + self.npm_resolution().await?.clone(), + self.package_json_deps_installer().await?.clone(), + ))) + }) + .await + } + + pub fn file_watcher(&self) -> Result<&Arc, AnyError> { + self.services.file_watcher.get_or_try_init(|| { + let watcher = FileWatcher::new( + self.options.clone(), + self.cjs_resolutions().clone(), + self.graph_container().clone(), + self.maybe_file_watcher_reporter().clone(), + self.parsed_source_cache()?.clone(), + ); + watcher.init_watcher(); + Ok(Arc::new(watcher)) + }) + } + + pub fn maybe_file_watcher_reporter(&self) -> &Option { + let maybe_sender = self.maybe_sender.borrow_mut().take(); + self + .services + .maybe_file_watcher_reporter + .get_or_init(|| maybe_sender.map(FileWatcherReporter::new)) + } + + pub fn emit_cache(&self) -> Result<&EmitCache, AnyError> { + self.services.emit_cache.get_or_try_init(|| { + Ok(EmitCache::new(self.deno_dir()?.gen_cache.clone())) + }) + } + + pub fn parsed_source_cache( + &self, + ) -> Result<&Arc, AnyError> { + self.services.parsed_source_cache.get_or_try_init(|| { + Ok(Arc::new(ParsedSourceCache::new( + self.caches()?.dep_analysis_db(), + ))) + }) + } + + pub fn emitter(&self) -> Result<&Arc, AnyError> { + self.services.emitter.get_or_try_init(|| { + let ts_config_result = self + .options + .resolve_ts_config_for_emit(TsConfigType::Emit)?; + if let Some(ignored_options) = ts_config_result.maybe_ignored_options { + warn!("{}", ignored_options); + } + let emit_options: deno_ast::EmitOptions = + ts_config_result.ts_config.into(); + Ok(Arc::new(Emitter::new( + self.emit_cache()?.clone(), + self.parsed_source_cache()?.clone(), + emit_options, + ))) + }) + } + + pub 
async fn node_resolver(&self) -> Result<&Arc, AnyError> { + self + .services + .node_resolver + .get_or_try_init_async(async { + Ok(Arc::new(NodeResolver::new( + self.node_fs().clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn node_code_translator( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .node_code_translator + .get_or_try_init_async(async { + let caches = self.caches()?; + let node_analysis_cache = + NodeAnalysisCache::new(caches.node_analysis_db()); + let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); + + Ok(Arc::new(NodeCodeTranslator::new( + cjs_esm_analyzer, + self.node_fs().clone(), + self.node_resolver().await?.clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn type_checker(&self) -> Result<&Arc, AnyError> { + self + .services + .type_checker + .get_or_try_init_async(async { + Ok(Arc::new(TypeChecker::new( + self.caches()?.clone(), + self.options.clone(), + self.node_resolver().await?.clone(), + self.npm_resolver().await?.clone(), + ))) + }) + .await + } + + pub async fn module_graph_builder( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .module_graph_builder + .get_or_try_init_async(async { + Ok(Arc::new(ModuleGraphBuilder::new( + self.options.clone(), + self.resolver().await?.clone(), + self.npm_resolver().await?.clone(), + self.parsed_source_cache()?.clone(), + self.maybe_lockfile().clone(), + self.emit_cache()?.clone(), + self.file_fetcher()?.clone(), + self.type_checker().await?.clone(), + ))) + }) + .await + } + + pub fn graph_container(&self) -> &Arc { + self.services.graph_container.get_or_init(Default::default) + } + + pub fn maybe_inspector_server(&self) -> &Option> { + self + .services + .maybe_inspector_server + .get_or_init(|| self.options.resolve_inspector_server().map(Arc::new)) + } + + pub async fn module_load_preparer( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .module_load_preparer + 
.get_or_try_init_async(async { + Ok(Arc::new(ModuleLoadPreparer::new( + self.options.clone(), + self.graph_container().clone(), + self.maybe_lockfile().clone(), + self.maybe_file_watcher_reporter().clone(), + self.module_graph_builder().await?.clone(), + self.parsed_source_cache()?.clone(), + self.text_only_progress_bar().clone(), + self.resolver().await?.clone(), + self.type_checker().await?.clone(), + ))) + }) + .await + } + + pub fn cjs_resolutions(&self) -> &Arc { + self.services.cjs_resolutions.get_or_init(Default::default) + } + + /// Gets a function that can be used to create a CliMainWorkerFactory + /// for a file watcher. + pub async fn create_cli_main_worker_factory_func( + &self, + ) -> Result CliMainWorkerFactory>, AnyError> { + let emitter = self.emitter()?.clone(); + let graph_container = self.graph_container().clone(); + let module_load_preparer = self.module_load_preparer().await?.clone(); + let parsed_source_cache = self.parsed_source_cache()?.clone(); + let resolver = self.resolver().await?.clone(); + let blob_store = self.blob_store().clone(); + let cjs_resolutions = self.cjs_resolutions().clone(); + let node_code_translator = self.node_code_translator().await?.clone(); + let options = self.cli_options().clone(); + let main_worker_options = self.create_cli_main_worker_options()?; + let node_fs = self.node_fs().clone(); + let root_cert_store = self.root_cert_store()?.clone(); + let node_resolver = self.node_resolver().await?.clone(); + let npm_resolver = self.npm_resolver().await?.clone(); + let maybe_inspector_server = self.maybe_inspector_server().clone(); + Ok(Arc::new(move || { + CliMainWorkerFactory::new( + StorageKeyResolver::from_options(&options), + npm_resolver.clone(), + node_resolver.clone(), + Box::new(CliHasNodeSpecifierChecker(graph_container.clone())), + blob_store.clone(), + Box::new(CliModuleLoaderFactory::new( + &options, + emitter.clone(), + graph_container.clone(), + module_load_preparer.clone(), + parsed_source_cache.clone(), 
+ resolver.clone(), + NpmModuleLoader::new( + cjs_resolutions.clone(), + node_code_translator.clone(), + node_resolver.clone(), + ), + )), + root_cert_store.clone(), + node_fs.clone(), + maybe_inspector_server.clone(), + main_worker_options.clone(), + ) + })) + } + + pub async fn create_cli_main_worker_factory( + &self, + ) -> Result { + let node_resolver = self.node_resolver().await?; + Ok(CliMainWorkerFactory::new( + StorageKeyResolver::from_options(&self.options), + self.npm_resolver().await?.clone(), + node_resolver.clone(), + Box::new(CliHasNodeSpecifierChecker(self.graph_container().clone())), + self.blob_store().clone(), + Box::new(CliModuleLoaderFactory::new( + &self.options, + self.emitter()?.clone(), + self.graph_container().clone(), + self.module_load_preparer().await?.clone(), + self.parsed_source_cache()?.clone(), + self.resolver().await?.clone(), + NpmModuleLoader::new( + self.cjs_resolutions().clone(), + self.node_code_translator().await?.clone(), + node_resolver.clone(), + ), + )), + self.root_cert_store()?.clone(), + self.node_fs().clone(), + self.maybe_inspector_server().clone(), + self.create_cli_main_worker_options()?, + )) + } + + fn create_cli_main_worker_options( + &self, + ) -> Result { + Ok(CliMainWorkerOptions { + argv: self.options.argv().clone(), + debug: self + .options + .log_level() + .map(|l| l == log::Level::Debug) + .unwrap_or(false), + coverage_dir: self.options.coverage_dir(), + enable_testing_features: self.options.enable_testing_features(), + has_node_modules_dir: self.options.has_node_modules_dir(), + inspect_brk: self.options.inspect_brk().is_some(), + inspect_wait: self.options.inspect_wait().is_some(), + is_inspecting: self.options.is_inspecting(), + is_npm_main: self.options.is_npm_main(), + location: self.options.location_flag().clone(), + maybe_binary_npm_command_name: { + let mut maybe_binary_command_name = None; + if let DenoSubcommand::Run(flags) = self.options.sub_command() { + if let Ok(pkg_ref) = 
NpmPackageReqReference::from_str(&flags.script) { + // if the user ran a binary command, we'll need to set process.argv[0] + // to be the name of the binary command instead of deno + let binary_name = pkg_ref + .sub_path + .as_deref() + .unwrap_or(pkg_ref.req.name.as_str()); + maybe_binary_command_name = Some(binary_name.to_string()); + } + } + maybe_binary_command_name + }, + origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()), + seed: self.options.seed(), + unsafely_ignore_certificate_errors: self + .options + .unsafely_ignore_certificate_errors() + .clone(), + unstable: self.options.unstable(), + }) + } +} + +struct CliHasNodeSpecifierChecker(Arc); + +impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { + fn has_node_specifier(&self) -> bool { + self.0.graph().has_node_specifier + } +} diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 288e453626..d49a2559ca 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -76,6 +76,7 @@ use crate::args::LintOptions; use crate::args::TsConfig; use crate::cache::DenoDir; use crate::cache::HttpCache; +use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; use crate::graph_util; use crate::http_util::HttpClient; @@ -85,7 +86,6 @@ use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; use crate::npm::NpmCache; use crate::npm::NpmResolution; -use crate::proc_state::ProcState; use crate::tools::fmt::format_file; use crate::tools::fmt::format_parsed_source; use crate::util::fs::remove_dir_all_if_exists; @@ -185,15 +185,14 @@ impl LanguageServer { .into_iter() .map(|d| (d.specifier().clone(), d)) .collect::>(); - // todo(dsherret): don't use ProcState here - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; - let mut inner_loader = ps.module_graph_builder.create_graph_loader(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let module_graph_builder = factory.module_graph_builder().await?; 
+ let mut inner_loader = module_graph_builder.create_graph_loader(); let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader { inner_loader: &mut inner_loader, open_docs: &open_docs, }; - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph_with_loader(roots.clone(), &mut loader) .await?; graph_util::graph_valid( diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index 5dfb310137..4834cd0c9c 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -6,11 +6,11 @@ use super::lsp_custom; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; +use crate::factory::CliFactory; use crate::lsp::client::Client; use crate::lsp::client::TestingNotification; use crate::lsp::config; use crate::lsp::logging::lsp_log; -use crate::proc_state; use crate::tools::test; use crate::tools::test::FailFastTracker; use crate::tools::test::TestEventSender; @@ -218,16 +218,16 @@ impl TestRun { let args = self.get_args(); lsp_log!("Executing test run with arguments: {}", args.join(" ")); let flags = flags_from_vec(args.into_iter().map(String::from).collect())?; - let ps = proc_state::ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&factory.cli_options().permissions_options())?; test::check_specifiers( - &ps.options, - &ps.file_fetcher, - &ps.module_load_preparer, + factory.cli_options(), + factory.file_fetcher()?, + factory.module_load_preparer().await?, self .queue .iter() @@ -236,18 +236,19 @@ impl TestRun { ) .await?; - let (concurrent_jobs, fail_fast) = - if let DenoSubcommand::Test(test_flags) = ps.options.sub_command() { - ( - test_flags - .concurrent_jobs - .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()) - .into(), - test_flags.fail_fast, - ) - } else { - unreachable!("Should always be Test subcommand."); - }; + let (concurrent_jobs, fail_fast) = if let DenoSubcommand::Test(test_flags) = + factory.cli_options().sub_command() + { + ( + test_flags + .concurrent_jobs + .unwrap_or_else(|| NonZeroUsize::new(1).unwrap()) + .into(), + test_flags.fail_fast, + ) + } else { + unreachable!("Should always be Test subcommand."); + }; let (sender, mut receiver) = mpsc::unbounded_channel::(); let sender = TestEventSender::new(sender); @@ -259,7 +260,8 @@ impl TestRun { let tests: Arc>> = Arc::new(RwLock::new(IndexMap::new())); let mut test_steps = IndexMap::new(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); let join_handles = queue.into_iter().map(move |specifier| { let specifier = specifier.clone(); diff --git a/cli/main.rs b/cli/main.rs index 02ac5891cd..85942cbd82 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -6,6 +6,7 @@ mod cache; mod deno_std; mod emit; mod errors; +mod factory; mod file_fetcher; mod graph_util; mod http_util; @@ -16,19 +17,18 @@ mod napi; mod node; mod npm; mod ops; -mod proc_state; mod resolver; mod standalone; mod tools; mod tsc; mod util; mod version; +mod watcher; mod worker; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use 
crate::args::Flags; -use crate::proc_state::ProcState; use crate::resolver::CliGraphResolver; use crate::util::display; use crate::util::v8::get_v8_flags_from_env; @@ -41,6 +41,7 @@ use deno_core::error::JsError; use deno_runtime::colors; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::run_local; +use factory::CliFactory; use std::env; use std::env::current_exe; use std::path::PathBuf; @@ -70,16 +71,20 @@ async fn run_subcommand(flags: Flags) -> Result { tools::run::eval_command(flags, eval_flags).await } DenoSubcommand::Cache(cache_flags) => { - let ps = ProcState::from_flags(flags).await?; - ps.module_load_preparer + let factory = CliFactory::from_flags(flags).await?; + let module_load_preparer = factory.module_load_preparer().await?; + let emitter = factory.emitter()?; + let graph_container = factory.graph_container(); + module_load_preparer .load_and_type_check_files(&cache_flags.files) .await?; - ps.emitter.cache_module_emits(&ps.graph_container.graph())?; + emitter.cache_module_emits(&graph_container.graph())?; Ok(0) } DenoSubcommand::Check(check_flags) => { - let ps = ProcState::from_flags(flags).await?; - ps.module_load_preparer + let factory = CliFactory::from_flags(flags).await?; + let module_load_preparer = factory.module_load_preparer().await?; + module_load_preparer .load_and_type_check_files(&check_flags.files) .await?; Ok(0) diff --git a/cli/module_loader.rs b/cli/module_loader.rs index d8a5b73c4d..0ed84a20f6 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -12,14 +12,13 @@ use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphContainer; use crate::node; use crate::node::CliNodeCodeTranslator; -use crate::proc_state::CjsResolutionStore; -use crate::proc_state::FileWatcherReporter; use crate::resolver::CliGraphResolver; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use 
crate::util::text_encoding::source_map_from_code; +use crate::watcher::FileWatcherReporter; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; @@ -791,3 +790,21 @@ impl NpmModuleLoader { Ok(response.into_url()) } } + +/// Keeps track of what module specifiers were resolved as CJS. +#[derive(Default)] +pub struct CjsResolutionStore(Mutex>); + +impl CjsResolutionStore { + pub fn clear(&self) { + self.0.lock().clear(); + } + + pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { + self.0.lock().contains(specifier) + } + + pub fn insert(&self, specifier: ModuleSpecifier) { + self.0.lock().insert(specifier); + } +} diff --git a/cli/proc_state.rs b/cli/proc_state.rs deleted file mode 100644 index 6c1a5e7c56..0000000000 --- a/cli/proc_state.rs +++ /dev/null @@ -1,447 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -use crate::args::CliOptions; -use crate::args::DenoSubcommand; -use crate::args::Flags; -use crate::args::Lockfile; -use crate::args::StorageKeyResolver; -use crate::args::TsConfigType; -use crate::cache::Caches; -use crate::cache::DenoDir; -use crate::cache::EmitCache; -use crate::cache::HttpCache; -use crate::cache::NodeAnalysisCache; -use crate::cache::ParsedSourceCache; -use crate::emit::Emitter; -use crate::file_fetcher::FileFetcher; -use crate::graph_util::ModuleGraphBuilder; -use crate::graph_util::ModuleGraphContainer; -use crate::http_util::HttpClient; -use crate::module_loader::CliModuleLoaderFactory; -use crate::module_loader::ModuleLoadPreparer; -use crate::module_loader::NpmModuleLoader; -use crate::node::CliCjsEsmCodeAnalyzer; -use crate::node::CliNodeCodeTranslator; -use crate::npm::create_npm_fs_resolver; -use crate::npm::CliNpmRegistryApi; -use crate::npm::CliNpmResolver; -use crate::npm::NpmCache; -use crate::npm::NpmResolution; -use crate::npm::PackageJsonDepsInstaller; -use crate::resolver::CliGraphResolver; -use crate::tools::check::TypeChecker; -use 
crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; -use crate::worker::CliMainWorkerFactory; -use crate::worker::CliMainWorkerOptions; -use crate::worker::HasNodeSpecifierChecker; - -use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::ModuleSpecifier; - -use deno_runtime::deno_node; -use deno_runtime::deno_node::analyze::NodeCodeTranslator; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_tls::rustls::RootCertStore; -use deno_runtime::deno_web::BlobStore; -use deno_runtime::inspector_server::InspectorServer; -use deno_semver::npm::NpmPackageReqReference; -use import_map::ImportMap; -use log::warn; -use std::collections::HashSet; -use std::path::PathBuf; -use std::sync::Arc; - -/// This structure used to represent state of single "deno" program -/// that was shared by all created workers. It morphed into being the -/// "factory" for all objects, but is being slowly phased out. -pub struct ProcState { - pub dir: DenoDir, - pub caches: Arc, - pub file_fetcher: Arc, - pub http_client: HttpClient, - pub options: Arc, - pub emit_cache: EmitCache, - pub emitter: Arc, - pub graph_container: Arc, - pub lockfile: Option>>, - pub maybe_import_map: Option>, - pub maybe_inspector_server: Option>, - pub root_cert_store: RootCertStore, - pub blob_store: BlobStore, - pub parsed_source_cache: Arc, - pub resolver: Arc, - maybe_file_watcher_reporter: Option, - pub module_graph_builder: Arc, - pub module_load_preparer: Arc, - pub node_code_translator: Arc, - pub node_fs: Arc, - pub node_resolver: Arc, - pub npm_api: Arc, - pub npm_cache: Arc, - pub npm_resolver: Arc, - pub npm_resolution: Arc, - pub package_json_deps_installer: Arc, - pub cjs_resolutions: Arc, -} - -impl ProcState { - pub async fn from_cli_options( - options: Arc, - ) -> Result { - Self::build_with_sender(options, None).await - } - - pub async fn from_flags(flags: Flags) -> Result { - 
Self::from_cli_options(Arc::new(CliOptions::from_flags(flags)?)).await - } - - pub async fn from_flags_for_file_watcher( - flags: Flags, - files_to_watch_sender: tokio::sync::mpsc::UnboundedSender>, - ) -> Result { - // resolve the config each time - let cli_options = Arc::new(CliOptions::from_flags(flags)?); - let ps = - Self::build_with_sender(cli_options, Some(files_to_watch_sender.clone())) - .await?; - ps.init_watcher(); - Ok(ps) - } - - /// Reset all runtime state to its default. This should be used on file - /// watcher restarts. - pub fn reset_for_file_watcher(&self) { - self.cjs_resolutions.clear(); - self.parsed_source_cache.clear(); - self.graph_container.clear(); - - self.init_watcher(); - } - - // Add invariant files like the import map and explicit watch flag list to - // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. - fn init_watcher(&self) { - let files_to_watch_sender = match &self.maybe_file_watcher_reporter { - Some(reporter) => &reporter.sender, - None => return, - }; - if let Some(watch_paths) = self.options.watch_paths() { - files_to_watch_sender.send(watch_paths.clone()).unwrap(); - } - if let Ok(Some(import_map_path)) = self - .options - .resolve_import_map_specifier() - .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) - { - files_to_watch_sender.send(vec![import_map_path]).unwrap(); - } - } - - async fn build_with_sender( - cli_options: Arc, - maybe_sender: Option>>, - ) -> Result { - let dir = cli_options.resolve_deno_dir()?; - let caches = Arc::new(Caches::default()); - // Warm up the caches we know we'll likely need based on the CLI mode - match cli_options.sub_command() { - DenoSubcommand::Run(_) => { - _ = caches.dep_analysis_db(&dir); - _ = caches.node_analysis_db(&dir); - } - DenoSubcommand::Check(_) => { - _ = caches.dep_analysis_db(&dir); - _ = caches.node_analysis_db(&dir); - _ = caches.type_checking_cache_db(&dir); - } - _ => {} - } - let blob_store = BlobStore::default(); - let 
deps_cache_location = dir.deps_folder_path(); - let http_cache = HttpCache::new(&deps_cache_location); - let root_cert_store = cli_options.resolve_root_cert_store()?; - let cache_usage = cli_options.cache_setting(); - let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly); - let http_client = HttpClient::new( - Some(root_cert_store.clone()), - cli_options.unsafely_ignore_certificate_errors().clone(), - )?; - let file_fetcher = FileFetcher::new( - http_cache, - cache_usage, - !cli_options.no_remote(), - http_client.clone(), - blob_store.clone(), - Some(progress_bar.clone()), - ); - - let lockfile = cli_options.maybe_lock_file(); - - let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); - let npm_cache = Arc::new(NpmCache::new( - dir.npm_folder_path(), - cli_options.cache_setting(), - http_client.clone(), - progress_bar.clone(), - )); - let npm_api = Arc::new(CliNpmRegistryApi::new( - npm_registry_url.clone(), - npm_cache.clone(), - http_client.clone(), - progress_bar.clone(), - )); - let npm_snapshot = cli_options - .resolve_npm_resolution_snapshot(&npm_api) - .await?; - let npm_resolution = Arc::new(NpmResolution::from_serialized( - npm_api.clone(), - npm_snapshot, - lockfile.as_ref().cloned(), - )); - let node_fs = Arc::new(deno_node::RealFs); - let npm_fs_resolver = create_npm_fs_resolver( - node_fs.clone(), - npm_cache.clone(), - &progress_bar, - npm_registry_url, - npm_resolution.clone(), - cli_options.node_modules_dir_path(), - ); - let npm_resolver = Arc::new(CliNpmResolver::new( - npm_resolution.clone(), - npm_fs_resolver, - lockfile.as_ref().cloned(), - )); - let package_json_deps_installer = Arc::new(PackageJsonDepsInstaller::new( - npm_api.clone(), - npm_resolution.clone(), - cli_options.maybe_package_json_deps(), - )); - let maybe_import_map = cli_options - .resolve_import_map(&file_fetcher) - .await? 
- .map(Arc::new); - let maybe_inspector_server = - cli_options.resolve_inspector_server().map(Arc::new); - - let resolver = Arc::new(CliGraphResolver::new( - cli_options.to_maybe_jsx_import_source_config(), - maybe_import_map.clone(), - cli_options.no_npm(), - npm_api.clone(), - npm_resolution.clone(), - package_json_deps_installer.clone(), - )); - - let maybe_file_watcher_reporter = - maybe_sender.map(|sender| FileWatcherReporter { - sender, - file_paths: Arc::new(Mutex::new(vec![])), - }); - - let ts_config_result = - cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?; - if let Some(ignored_options) = ts_config_result.maybe_ignored_options { - warn!("{}", ignored_options); - } - let emit_cache = EmitCache::new(dir.gen_cache.clone()); - let parsed_source_cache = - Arc::new(ParsedSourceCache::new(caches.dep_analysis_db(&dir))); - let emit_options: deno_ast::EmitOptions = ts_config_result.ts_config.into(); - let emitter = Arc::new(Emitter::new( - emit_cache.clone(), - parsed_source_cache.clone(), - emit_options, - )); - let file_fetcher = Arc::new(file_fetcher); - let node_analysis_cache = - NodeAnalysisCache::new(caches.node_analysis_db(&dir)); - let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); - let node_resolver = - Arc::new(NodeResolver::new(node_fs.clone(), npm_resolver.clone())); - let node_code_translator = Arc::new(NodeCodeTranslator::new( - cjs_esm_analyzer, - node_fs.clone(), - node_resolver.clone(), - npm_resolver.clone(), - )); - let type_checker = Arc::new(TypeChecker::new( - dir.clone(), - caches.clone(), - cli_options.clone(), - node_resolver.clone(), - npm_resolver.clone(), - )); - let module_graph_builder = Arc::new(ModuleGraphBuilder::new( - cli_options.clone(), - resolver.clone(), - npm_resolver.clone(), - parsed_source_cache.clone(), - lockfile.clone(), - emit_cache.clone(), - file_fetcher.clone(), - type_checker.clone(), - )); - let graph_container: Arc = Default::default(); - let module_load_preparer = 
Arc::new(ModuleLoadPreparer::new( - cli_options.clone(), - graph_container.clone(), - lockfile.clone(), - maybe_file_watcher_reporter.clone(), - module_graph_builder.clone(), - parsed_source_cache.clone(), - progress_bar.clone(), - resolver.clone(), - type_checker, - )); - - Ok(ProcState { - dir, - caches, - options: cli_options, - emit_cache, - emitter, - file_fetcher, - http_client, - graph_container, - lockfile, - maybe_import_map, - maybe_inspector_server, - root_cert_store, - blob_store, - parsed_source_cache, - resolver, - maybe_file_watcher_reporter, - module_graph_builder, - node_code_translator, - node_fs, - node_resolver, - npm_api, - npm_cache, - npm_resolver, - npm_resolution, - package_json_deps_installer, - cjs_resolutions: Default::default(), - module_load_preparer, - }) - } - - // todo(dsherret): this is a transitory method as we separate out - // ProcState from more code - pub fn create_cli_main_worker_factory(&self) -> CliMainWorkerFactory { - CliMainWorkerFactory::new( - StorageKeyResolver::from_options(&self.options), - self.npm_resolver.clone(), - self.node_resolver.clone(), - Box::new(CliHasNodeSpecifierChecker(self.graph_container.clone())), - self.blob_store.clone(), - Box::new(CliModuleLoaderFactory::new( - &self.options, - self.emitter.clone(), - self.graph_container.clone(), - self.module_load_preparer.clone(), - self.parsed_source_cache.clone(), - self.resolver.clone(), - NpmModuleLoader::new( - self.cjs_resolutions.clone(), - self.node_code_translator.clone(), - self.node_resolver.clone(), - ), - )), - self.root_cert_store.clone(), - self.node_fs.clone(), - self.maybe_inspector_server.clone(), - CliMainWorkerOptions { - argv: self.options.argv().clone(), - debug: self - .options - .log_level() - .map(|l| l == log::Level::Debug) - .unwrap_or(false), - coverage_dir: self.options.coverage_dir(), - enable_testing_features: self.options.enable_testing_features(), - has_node_modules_dir: self.options.has_node_modules_dir(), - inspect_brk: 
self.options.inspect_brk().is_some(), - inspect_wait: self.options.inspect_wait().is_some(), - is_inspecting: self.options.is_inspecting(), - is_npm_main: self.options.is_npm_main(), - location: self.options.location_flag().clone(), - maybe_binary_npm_command_name: { - let mut maybe_binary_command_name = None; - if let DenoSubcommand::Run(flags) = self.options.sub_command() { - if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) - { - // if the user ran a binary command, we'll need to set process.argv[0] - // to be the name of the binary command instead of deno - let binary_name = pkg_ref - .sub_path - .as_deref() - .unwrap_or(pkg_ref.req.name.as_str()); - maybe_binary_command_name = Some(binary_name.to_string()); - } - } - maybe_binary_command_name - }, - origin_data_folder_path: Some(self.dir.origin_data_folder_path()), - seed: self.options.seed(), - unsafely_ignore_certificate_errors: self - .options - .unsafely_ignore_certificate_errors() - .clone(), - unstable: self.options.unstable(), - }, - ) - } -} - -struct CliHasNodeSpecifierChecker(Arc); - -impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker { - fn has_node_specifier(&self) -> bool { - self.0.graph().has_node_specifier - } -} - -/// Keeps track of what module specifiers were resolved as CJS. 
-#[derive(Default)] -pub struct CjsResolutionStore(Mutex>); - -impl CjsResolutionStore { - pub fn clear(&self) { - self.0.lock().clear(); - } - - pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { - self.0.lock().contains(specifier) - } - - pub fn insert(&self, specifier: ModuleSpecifier) { - self.0.lock().insert(specifier); - } -} - -#[derive(Clone, Debug)] -pub struct FileWatcherReporter { - sender: tokio::sync::mpsc::UnboundedSender>, - file_paths: Arc>>, -} - -impl deno_graph::source::Reporter for FileWatcherReporter { - fn on_load( - &self, - specifier: &ModuleSpecifier, - modules_done: usize, - modules_total: usize, - ) { - let mut file_paths = self.file_paths.lock(); - if specifier.scheme() == "file" { - file_paths.push(specifier.to_file_path().unwrap()); - } - - if modules_done == modules_total { - self.sender.send(file_paths.drain(..).collect()).unwrap(); - } - } -} diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index bca0aff2b4..51d8db79e1 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -5,7 +5,6 @@ use std::io::Seek; use std::io::SeekFrom; use std::io::Write; use std::path::Path; -use std::sync::Arc; use deno_ast::ModuleSpecifier; use deno_core::anyhow::Context; @@ -150,17 +149,17 @@ fn u64_from_bytes(arr: &[u8]) -> Result { Ok(u64::from_be_bytes(*fixed_arr)) } -pub struct DenoCompileBinaryWriter { - file_fetcher: Arc, - client: HttpClient, - deno_dir: DenoDir, +pub struct DenoCompileBinaryWriter<'a> { + file_fetcher: &'a FileFetcher, + client: &'a HttpClient, + deno_dir: &'a DenoDir, } -impl DenoCompileBinaryWriter { +impl<'a> DenoCompileBinaryWriter<'a> { pub fn new( - file_fetcher: Arc, - client: HttpClient, - deno_dir: DenoDir, + file_fetcher: &'a FileFetcher, + client: &'a HttpClient, + deno_dir: &'a DenoDir, ) -> Self { Self { file_fetcher, @@ -282,7 +281,7 @@ impl DenoCompileBinaryWriter { None => None, }; let maybe_import_map = cli_options - .resolve_import_map(&self.file_fetcher) + 
.resolve_import_map(self.file_fetcher) .await? .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); let metadata = Metadata { diff --git a/cli/tests/testdata/package_json/invalid_value/task.out b/cli/tests/testdata/package_json/invalid_value/task.out index 914dc27c6b..823c50612f 100644 --- a/cli/tests/testdata/package_json/invalid_value/task.out +++ b/cli/tests/testdata/package_json/invalid_value/task.out @@ -1,6 +1,6 @@ Warning Ignoring dependency '@denotest/cjs-default-export' in package.json because its version requirement failed to parse: Invalid npm specifier version requirement. Unexpected character. invalid stuff that won't parse ~ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task test echo 1 1 diff --git a/cli/tests/testdata/task/both/package_json_selected.out b/cli/tests/testdata/task/both/package_json_selected.out index 06b735c9da..d317af4ed4 100644 --- a/cli/tests/testdata/task/both/package_json_selected.out +++ b/cli/tests/testdata/task/both/package_json_selected.out @@ -1,7 +1,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. 
Task bin cli-esm testing this out "asdf" testing this diff --git a/cli/tests/testdata/task/npx/non_existent.out b/cli/tests/testdata/task/npx/non_existent.out index b08d29ece6..81065bf743 100644 --- a/cli/tests/testdata/task/npx/non_existent.out +++ b/cli/tests/testdata/task/npx/non_existent.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task non-existent npx this-command-should-not-exist-for-you npx: could not resolve command 'this-command-should-not-exist-for-you' diff --git a/cli/tests/testdata/task/npx/on_own.out b/cli/tests/testdata/task/npx/on_own.out index 80d8ed9db3..fc9673f7f6 100644 --- a/cli/tests/testdata/task/npx/on_own.out +++ b/cli/tests/testdata/task/npx/on_own.out @@ -1,3 +1,3 @@ -Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task on-own npx npx: missing command diff --git a/cli/tests/testdata/task/package_json/bin.out b/cli/tests/testdata/task/package_json/bin.out index fac6921156..6cfa06d433 100644 --- a/cli/tests/testdata/task/package_json/bin.out +++ b/cli/tests/testdata/task/package_json/bin.out @@ -3,7 +3,7 @@ Download http://localhost:4545/npm/registry/@denotest/bin/0.5.0.tgz Initialize @denotest/bin@0.5.0 Download http://localhost:4545/npm/registry/@denotest/bin/1.0.0.tgz Initialize @denotest/bin@1.0.0 -Warning Currently only basic package.json `scripts` are supported. 
Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release. +Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in an upcoming release. Task bin @denotest/bin hi && cli-esm testing this out && npx cli-cjs test "extra" hi testing diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 3f606cfa93..aa5bd044df 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -5,10 +5,10 @@ use crate::args::CliOptions; use crate::args::TypeCheckMode; use crate::colors; use crate::display::write_json_to_stdout; +use crate::factory::CliFactory; use crate::graph_util::graph_valid_with_cli_options; use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; use crate::util::file_watcher; @@ -635,12 +635,13 @@ pub async fn run_benchmarks( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; + Permissions::from_options(&cli_options.permissions_options())?; let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)?; @@ -649,15 +650,20 @@ pub async fn run_benchmarks( return Err(generic_error("No bench modules found")); } - check_specifiers(&ps.options, &ps.module_load_preparer, specifiers.clone()) - .await?; + check_specifiers( + cli_options, + factory.module_load_preparer().await?, + specifiers.clone(), + ) + .await?; if bench_options.no_run { return Ok(()); } - let log_level = ps.options.log_level(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let log_level = cli_options.log_level(); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); bench_specifiers( worker_factory, &permissions, @@ -678,21 +684,25 @@ pub async fn run_benchmarks_with_watch( cli_options: CliOptions, bench_options: BenchOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let file_watcher = factory.file_watcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various bench files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; + Permissions::from_options(&cli_options.permissions_options())?; + let no_check = cli_options.type_check_mode() == TypeCheckMode::None; let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let bench_options = &bench_options; - let module_graph_builder = ps.module_graph_builder.clone(); - let cli_options = ps.options.clone(); + let module_graph_builder = module_graph_builder.clone(); + let cli_options = cli_options.clone(); async move { let bench_modules = @@ -797,15 +807,18 @@ pub async fn run_benchmarks_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let bench_options = &bench_options; - ps.reset_for_file_watcher(); - let module_load_preparer = ps.module_load_preparer.clone(); - let cli_options = ps.options.clone(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + file_watcher.reset(); + let module_load_preparer = module_load_preparer.clone(); + let cli_options = cli_options.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory = Arc::new(create_cli_main_worker_factory()); let specifiers = collect_specifiers(&bench_options.files, is_supported_bench_path)? 
.into_iter() @@ -836,7 +849,7 @@ pub async fn run_benchmarks_with_watch( } }; - let clear_screen = !ps.options.no_clear_screen(); + let clear_screen = !cli_options.no_clear_screen(); file_watcher::watch_func( resolver, operation, diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index 26d170d7e2..759882c833 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -13,8 +13,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TsConfigType; use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; -use crate::proc_state::ProcState; use crate::util; use crate::util::display; use crate::util::file_watcher::ResolutionResult; @@ -40,9 +40,11 @@ pub async fn bundle( let module_specifier = &module_specifier; async move { log::debug!(">>>>> bundle START"); - let ps = ProcState::from_cli_options(cli_options).await?; - let graph = ps - .module_graph_builder + let factory = CliFactory::from_cli_options(cli_options); + let module_graph_builder = factory.module_graph_builder().await?; + let cli_options = factory.cli_options(); + + let graph = module_graph_builder .create_graph_and_maybe_check(vec![module_specifier.clone()]) .await?; @@ -58,15 +60,14 @@ pub async fn bundle( }) .collect(); - if let Ok(Some(import_map_path)) = ps - .options + if let Ok(Some(import_map_path)) = cli_options .resolve_import_map_specifier() .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) { paths_to_watch.push(import_map_path); } - Ok((paths_to_watch, graph, ps)) + Ok((paths_to_watch, graph, cli_options.clone())) } .map(move |result| match result { Ok((paths_to_watch, graph, ps)) => ResolutionResult::Restart { @@ -80,49 +81,50 @@ pub async fn bundle( }) }; - let operation = |(ps, graph): (ProcState, Arc)| { - let out_file = &bundle_flags.out_file; - async move { - // at the moment, we don't support npm specifiers in deno bundle, so show an error - error_for_any_npm_specifier(&graph)?; + let operation = + 
|(cli_options, graph): (Arc, Arc)| { + let out_file = &bundle_flags.out_file; + async move { + // at the moment, we don't support npm specifiers in deno bundle, so show an error + error_for_any_npm_specifier(&graph)?; - let bundle_output = bundle_module_graph(graph.as_ref(), &ps)?; - log::debug!(">>>>> bundle END"); + let bundle_output = bundle_module_graph(graph.as_ref(), &cli_options)?; + log::debug!(">>>>> bundle END"); - if let Some(out_file) = out_file { - let output_bytes = bundle_output.code.as_bytes(); - let output_len = output_bytes.len(); - util::fs::write_file(out_file, output_bytes, 0o644)?; - log::info!( - "{} {:?} ({})", - colors::green("Emit"), - out_file, - colors::gray(display::human_size(output_len as f64)) - ); - if let Some(bundle_map) = bundle_output.maybe_map { - let map_bytes = bundle_map.as_bytes(); - let map_len = map_bytes.len(); - let ext = if let Some(curr_ext) = out_file.extension() { - format!("{}.map", curr_ext.to_string_lossy()) - } else { - "map".to_string() - }; - let map_out_file = out_file.with_extension(ext); - util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + if let Some(out_file) = out_file { + let output_bytes = bundle_output.code.as_bytes(); + let output_len = output_bytes.len(); + util::fs::write_file(out_file, output_bytes, 0o644)?; log::info!( "{} {:?} ({})", colors::green("Emit"), - map_out_file, - colors::gray(display::human_size(map_len as f64)) + out_file, + colors::gray(display::human_size(output_len as f64)) ); + if let Some(bundle_map) = bundle_output.maybe_map { + let map_bytes = bundle_map.as_bytes(); + let map_len = map_bytes.len(); + let ext = if let Some(curr_ext) = out_file.extension() { + format!("{}.map", curr_ext.to_string_lossy()) + } else { + "map".to_string() + }; + let map_out_file = out_file.with_extension(ext); + util::fs::write_file(&map_out_file, map_bytes, 0o644)?; + log::info!( + "{} {:?} ({})", + colors::green("Emit"), + map_out_file, + colors::gray(display::human_size(map_len as f64)) + 
); + } + } else { + println!("{}", bundle_output.code); } - } else { - println!("{}", bundle_output.code); - } - Ok(()) - } - }; + Ok(()) + } + }; if cli_options.watch_paths().is_some() { util::file_watcher::watch_func( @@ -149,14 +151,13 @@ pub async fn bundle( fn bundle_module_graph( graph: &deno_graph::ModuleGraph, - ps: &ProcState, + cli_options: &CliOptions, ) -> Result { log::info!("{} {}", colors::green("Bundle"), graph.roots[0]); - let ts_config_result = ps - .options - .resolve_ts_config_for_emit(TsConfigType::Bundle)?; - if ps.options.type_check_mode() == TypeCheckMode::None { + let ts_config_result = + cli_options.resolve_ts_config_for_emit(TsConfigType::Bundle)?; + if cli_options.type_check_mode() == TypeCheckMode::None { if let Some(ignored_options) = ts_config_result.maybe_ignored_options { log::warn!("{}", ignored_options); } diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 4fb6800fa0..4464802e6e 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -19,7 +19,6 @@ use crate::args::TsConfigType; use crate::args::TsTypeLib; use crate::args::TypeCheckMode; use crate::cache::Caches; -use crate::cache::DenoDir; use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; use crate::npm::CliNpmResolver; @@ -39,7 +38,6 @@ pub struct CheckOptions { } pub struct TypeChecker { - deno_dir: DenoDir, caches: Arc, cli_options: Arc, node_resolver: Arc, @@ -48,14 +46,12 @@ pub struct TypeChecker { impl TypeChecker { pub fn new( - deno_dir: DenoDir, caches: Arc, cli_options: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { - deno_dir, caches, cli_options, node_resolver, @@ -95,8 +91,7 @@ impl TypeChecker { let ts_config = ts_config_result.ts_config; let type_check_mode = self.cli_options.type_check_mode(); let debug = self.cli_options.log_level() == Some(log::Level::Debug); - let cache = - TypeCheckCache::new(self.caches.type_checking_cache_db(&self.deno_dir)); + let cache = 
TypeCheckCache::new(self.caches.type_checking_cache_db()); let check_js = ts_config.get_check_js(); let check_hash = match get_check_hash(&graph, type_check_mode, &ts_config) { CheckHashResult::NoFiles => return Ok(()), diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index d3044a7163..223bac3167 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -4,7 +4,7 @@ use crate::args::CoverageFlags; use crate::args::FileFlags; use crate::args::Flags; use crate::colors; -use crate::proc_state::ProcState; +use crate::factory::CliFactory; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::fs::FileCollector; @@ -623,8 +623,11 @@ pub async fn cover_files( return Err(generic_error("No matching coverage profiles found")); } - let ps = ProcState::from_flags(flags).await?; - let root_dir_url = ps.npm_resolver.root_dir_url(); + let factory = CliFactory::from_flags(flags).await?; + let root_dir_url = factory.npm_resolver().await?.root_dir_url(); + let file_fetcher = factory.file_fetcher()?; + let cli_options = factory.cli_options(); + let emitter = factory.emitter()?; let script_coverages = collect_coverages(coverage_flags.files)?; let script_coverages = filter_coverages( @@ -667,13 +670,13 @@ pub async fn cover_files( for script_coverage in script_coverages { let module_specifier = deno_core::resolve_url_or_path( &script_coverage.url, - ps.options.initial_cwd(), + cli_options.initial_cwd(), )?; let maybe_file = if module_specifier.scheme() == "file" { - ps.file_fetcher.get_source(&module_specifier) + file_fetcher.get_source(&module_specifier) } else { - ps.file_fetcher + file_fetcher .fetch_cached(&module_specifier, 10) .with_context(|| { format!("Failed to fetch \"{module_specifier}\" from cache.") @@ -700,7 +703,7 @@ pub async fn cover_files( | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - match ps.emitter.maybed_cached_emit(&file.specifier, &file.source) { + match 
emitter.maybed_cached_emit(&file.specifier, &file.source) { Some(code) => code.into(), None => { return Err(anyhow!( diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index a07ba175aa..2cb53cb6ab 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -6,9 +6,9 @@ use crate::args::Flags; use crate::colors; use crate::display::write_json_to_stdout; use crate::display::write_to_stdout_ignore_sigpipe; +use crate::factory::CliFactory; use crate::file_fetcher::File; use crate::graph_util::graph_lock_or_exit; -use crate::proc_state::ProcState; use crate::tsc::get_types_declaration_file_text; use deno_ast::MediaType; use deno_core::anyhow::bail; @@ -23,13 +23,14 @@ pub async fn print_docs( flags: Flags, doc_flags: DocFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); let mut doc_nodes = match doc_flags.source_file { DocSourceFileFlag::Builtin => { let source_file_specifier = ModuleSpecifier::parse("internal://lib.deno.d.ts").unwrap(); - let content = get_types_declaration_file_text(ps.options.unstable()); + let content = get_types_declaration_file_text(cli_options.unstable()); let mut loader = deno_graph::source::MemoryLoader::new( vec![( source_file_specifier.to_string(), @@ -61,13 +62,18 @@ pub async fn print_docs( doc_parser.parse_module(&source_file_specifier)?.definitions } DocSourceFileFlag::Path(source_file) => { + let file_fetcher = factory.file_fetcher()?; + let module_graph_builder = factory.module_graph_builder().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let parsed_source_cache = factory.parsed_source_cache()?; + let module_specifier = - resolve_url_or_path(&source_file, ps.options.initial_cwd())?; + resolve_url_or_path(&source_file, cli_options.initial_cwd())?; // If the root module has external types, the module graph won't redirect it, // so instead create a dummy file which exports everything from the actual file 
being documented. let root_specifier = - resolve_path("./$deno$doc.ts", ps.options.initial_cwd()).unwrap(); + resolve_path("./$deno$doc.ts", cli_options.initial_cwd()).unwrap(); let root = File { local: PathBuf::from("./$deno$doc.ts"), maybe_types: None, @@ -78,21 +84,20 @@ pub async fn print_docs( }; // Save our fake file into file fetcher cache. - ps.file_fetcher.insert_cached(root); + file_fetcher.insert_cached(root); - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph(vec![root_specifier.clone()]) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut lockfile.lock()); } let doc_parser = doc::DocParser::new( graph, doc_flags.private, - ps.parsed_source_cache.as_capturing_parser(), + parsed_source_cache.as_capturing_parser(), ); doc_parser.parse_with_reexports(&root_specifier)? } diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 41accacba9..70d2bd6395 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -12,8 +12,8 @@ use crate::args::FilesConfig; use crate::args::FmtOptions; use crate::args::FmtOptionsConfig; use crate::args::ProseWrap; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::util::diff::diff; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -101,11 +101,12 @@ pub async fn format( } } }; - let deno_dir = &cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |(paths, fmt_options): (Vec, FmtOptionsConfig)| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.fmt_incremental_cache_db(deno_dir), + caches.fmt_incremental_cache_db(), &fmt_options, &paths, )); diff --git a/cli/tools/info.rs b/cli/tools/info.rs index a59f8a4c84..d491f55dc6 100644 --- a/cli/tools/info.rs +++ 
b/cli/tools/info.rs @@ -27,57 +27,61 @@ use deno_semver::npm::NpmPackageReqReference; use crate::args::Flags; use crate::args::InfoFlags; use crate::display; +use crate::factory::CliFactory; use crate::graph_util::graph_lock_or_exit; use crate::npm::CliNpmResolver; -use crate::proc_state::ProcState; use crate::util::checksum; pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); if let Some(specifier) = info_flags.file { - let specifier = resolve_url_or_path(&specifier, ps.options.initial_cwd())?; - let mut loader = ps.module_graph_builder.create_graph_loader(); + let module_graph_builder = factory.module_graph_builder().await?; + let npm_resolver = factory.npm_resolver().await?; + let maybe_lockfile = factory.maybe_lockfile(); + let specifier = resolve_url_or_path(&specifier, cli_options.initial_cwd())?; + let mut loader = module_graph_builder.create_graph_loader(); loader.enable_loading_cache_info(); // for displaying the cache information - let graph = ps - .module_graph_builder + let graph = module_graph_builder .create_graph_with_loader(vec![specifier], &mut loader) .await?; - if let Some(lockfile) = &ps.lockfile { + if let Some(lockfile) = maybe_lockfile { graph_lock_or_exit(&graph, &mut lockfile.lock()); } if info_flags.json { let mut json_graph = json!(graph); - add_npm_packages_to_json(&mut json_graph, &ps.npm_resolver); + add_npm_packages_to_json(&mut json_graph, npm_resolver); display::write_json_to_stdout(&json_graph)?; } else { let mut output = String::new(); - GraphDisplayContext::write(&graph, &ps.npm_resolver, &mut output)?; + GraphDisplayContext::write(&graph, npm_resolver, &mut output)?; display::write_to_stdout_ignore_sigpipe(output.as_bytes())?; } } else { // If it was just "deno info" print location of caches and exit print_cache_info( - &ps, + &factory, info_flags.json, - 
ps.options.location_flag().as_ref(), + cli_options.location_flag().as_ref(), )?; } Ok(()) } fn print_cache_info( - state: &ProcState, + factory: &CliFactory, json: bool, location: Option<&deno_core::url::Url>, ) -> Result<(), AnyError> { - let deno_dir = &state.dir.root_path_for_display(); - let modules_cache = &state.file_fetcher.get_http_cache_location(); - let npm_cache = &state.npm_cache.as_readonly().get_cache_location(); - let typescript_cache = &state.dir.gen_cache.location; - let registry_cache = &state.dir.registries_folder_path(); - let mut origin_dir = state.dir.origin_data_folder_path(); + let dir = factory.deno_dir()?; + let modules_cache = factory.file_fetcher()?.get_http_cache_location(); + let npm_cache = factory.npm_cache()?.as_readonly().get_cache_location(); + let typescript_cache = &dir.gen_cache.location; + let registry_cache = dir.registries_folder_path(); + let mut origin_dir = dir.origin_data_folder_path(); + let deno_dir = dir.root_path_for_display().to_string(); if let Some(location) = &location { origin_dir = @@ -88,7 +92,7 @@ fn print_cache_info( if json { let mut output = json!({ - "denoDir": deno_dir.to_string(), + "denoDir": deno_dir, "modulesCache": modules_cache, "npmCache": npm_cache, "typescriptCache": typescript_cache, diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 461bb1a50a..fb83c3cab9 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -6,8 +6,8 @@ use crate::args::ConfigFlag; use crate::args::Flags; use crate::args::InstallFlags; use crate::args::TypeCheckMode; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path_maybe_not_exists; use deno_core::anyhow::Context; @@ -233,9 +233,10 @@ pub async fn install_command( install_flags: InstallFlags, ) -> Result<(), AnyError> { // ensure the module is cached - ProcState::from_flags(flags.clone()) + CliFactory::from_flags(flags.clone()) + .await? 
+ .module_load_preparer() .await? - .module_load_preparer .load_and_type_check_files(&[install_flags.module_url.clone()]) .await?; diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index eae2f1032d..40c37ce773 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -11,8 +11,8 @@ use crate::args::FilesConfig; use crate::args::LintOptions; use crate::args::LintReporterKind; use crate::args::LintRulesConfig; -use crate::cache::Caches; use crate::colors; +use crate::factory::CliFactory; use crate::tools::fmt::run_parallelized; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -98,11 +98,12 @@ pub async fn lint( }; let has_error = Arc::new(AtomicBool::new(false)); - let deno_dir = cli_options.resolve_deno_dir()?; - let caches = Caches::default(); + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let caches = factory.caches()?; let operation = |paths: Vec| async { let incremental_cache = Arc::new(IncrementalCache::new( - caches.lint_incremental_cache_db(&deno_dir), + caches.lint_incremental_cache_db(), // use a hash of the rule names in order to bust the cache &{ // ensure this is stable by sorting it diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index f0faf74ec1..9f4b589196 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -4,8 +4,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::ReplFlags; use crate::colors; +use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; -use crate::proc_state::ProcState; use deno_core::error::AnyError; use deno_core::futures::StreamExt; use deno_runtime::permissions::Permissions; @@ -98,17 +98,17 @@ async fn read_eval_file( } pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = 
factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let cli_options = ps.options.clone(); - let npm_resolver = ps.npm_resolver.clone(); - let resolver = ps.resolver.clone(); - let dir = ps.dir.clone(); - let file_fetcher = ps.file_fetcher.clone(); - let worker_factory = ps.create_cli_main_worker_factory(); + let npm_resolver = factory.npm_resolver().await?.clone(); + let resolver = factory.resolver().await?.clone(); + let dir = factory.deno_dir()?; + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; let mut worker = worker_factory .create_main_worker(main_module, permissions) @@ -116,7 +116,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { worker.setup_repl().await?; let worker = worker.into_main_worker(); let mut repl_session = - ReplSession::initialize(&cli_options, npm_resolver, resolver, worker) + ReplSession::initialize(cli_options, npm_resolver, resolver, worker) .await?; let mut rustyline_channel = rustyline_channel(); @@ -130,7 +130,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result { if let Some(eval_files) = repl_flags.eval_files { for eval_file in eval_files { - match read_eval_file(&cli_options, &file_fetcher, &eval_file).await { + match read_eval_file(cli_options, file_fetcher, &eval_file).await { Ok(eval_source) => { let output = repl_session .evaluate_line_and_get_output(&eval_source) diff --git a/cli/tools/run.rs b/cli/tools/run.rs index e1dc529bc2..c6e706285b 100644 --- a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -10,8 +10,9 @@ use deno_runtime::permissions::PermissionsContainer; use crate::args::EvalFlags; use crate::args::Flags; +use crate::factory::CliFactory; +use crate::factory::CliFactoryBuilder; use crate::file_fetcher::File; -use crate::proc_state::ProcState; 
use crate::util; pub async fn run_script(flags: Flags) -> Result { @@ -31,23 +32,25 @@ To grant permissions, set them before the script argument. For example: } // TODO(bartlomieju): actually I think it will also fail if there's an import - // map specified and bare specifier is used on the command line - this should - // probably call `ProcState::resolve` instead - let ps = ProcState::from_flags(flags).await?; + // map specified and bare specifier is used on the command line + let factory = CliFactory::from_flags(flags).await?; + let deno_dir = factory.deno_dir()?; + let http_client = factory.http_client()?; + let cli_options = factory.cli_options(); // Run a background task that checks for available upgrades. If an earlier // run of this background task found a new version of Deno. super::upgrade::check_for_upgrades( - ps.http_client.clone(), - ps.dir.upgrade_check_file_path(), + http_client.clone(), + deno_dir.upgrade_check_file_path(), ); - let main_module = ps.options.resolve_main_module()?; + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let worker_factory = ps.create_cli_main_worker_factory(); + let worker_factory = factory.create_cli_main_worker_factory().await?; let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -57,11 +60,14 @@ To grant permissions, set them before the script argument. 
For example: } pub async fn run_from_stdin(flags: Flags) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let main_module = cli_options.resolve_main_module()?; + let file_fetcher = factory.file_fetcher()?; + let worker_factory = factory.create_cli_main_worker_factory().await?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); let mut source = Vec::new(); std::io::stdin().read_to_end(&mut source)?; @@ -76,9 +82,8 @@ pub async fn run_from_stdin(flags: Flags) -> Result { }; // Save our fake file into file fetcher cache // to allow module access by TS compiler - ps.file_fetcher.insert_cached(source_file); + file_fetcher.insert_cached(source_file); - let worker_factory = ps.create_cli_main_worker_factory(); let mut worker = worker_factory .create_main_worker(main_module, permissions) .await?; @@ -90,20 +95,26 @@ pub async fn run_from_stdin(flags: Flags) -> Result { // code properly. 
async fn run_with_watch(flags: Flags) -> Result { let (sender, receiver) = tokio::sync::mpsc::unbounded_channel(); - let ps = - ProcState::from_flags_for_file_watcher(flags, sender.clone()).await?; - let clear_screen = !ps.options.no_clear_screen(); - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactoryBuilder::new() + .with_watcher(sender.clone()) + .build_from_flags(flags) + .await?; + let file_watcher = factory.file_watcher()?; + let cli_options = factory.cli_options(); + let clear_screen = !cli_options.no_clear_screen(); + let main_module = cli_options.resolve_main_module()?; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |main_module: ModuleSpecifier| { - ps.reset_for_file_watcher(); + file_watcher.reset(); let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); - let worker_factory = ps.create_cli_main_worker_factory(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); Ok(async move { - let worker = worker_factory + let worker = create_cli_main_worker_factory() .create_main_worker(main_module, permissions) .await?; worker.run_for_watcher().await?; @@ -130,10 +141,14 @@ pub async fn eval_command( flags: Flags, eval_flags: EvalFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let main_module = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let main_worker_factory = factory.create_cli_main_worker_factory().await?; + + let main_module = cli_options.resolve_main_module()?; let permissions = PermissionsContainer::new(Permissions::from_options( - &ps.options.permissions_options(), + &cli_options.permissions_options(), )?); // Create a dummy source file. 
let source_code = if eval_flags.print { @@ -154,10 +169,9 @@ pub async fn eval_command( // Save our fake file into file fetcher cache // to allow module access by TS compiler. - ps.file_fetcher.insert_cached(file); + file_fetcher.insert_cached(file); - let mut worker = ps - .create_cli_main_worker_factory() + let mut worker = main_worker_factory .create_main_worker(main_module, permissions) .await?; let exit_code = worker.run().await?; diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index 94b1c01703..0e8d9ca733 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -2,11 +2,11 @@ use crate::args::CompileFlags; use crate::args::Flags; +use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; use crate::standalone::is_standalone_binary; use crate::standalone::DenoCompileBinaryWriter; use crate::util::path::path_has_trailing_slash; -use crate::ProcState; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::generic_error; @@ -23,30 +23,34 @@ pub async fn compile( flags: Flags, compile_flags: CompileFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; - let binary_writer = DenoCompileBinaryWriter::new( - ps.file_fetcher.clone(), - ps.http_client.clone(), - ps.dir.clone(), - ); - let module_specifier = ps.options.resolve_main_module()?; + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let http_client = factory.http_client()?; + let deno_dir = factory.deno_dir()?; + let module_graph_builder = factory.module_graph_builder().await?; + let parsed_source_cache = factory.parsed_source_cache()?; + + let binary_writer = + DenoCompileBinaryWriter::new(file_fetcher, http_client, deno_dir); + let module_specifier = cli_options.resolve_main_module()?; let module_roots = { let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); vec.push(module_specifier.clone()); 
for side_module in &compile_flags.include { - vec.push(resolve_url_or_path(side_module, ps.options.initial_cwd())?); + vec.push(resolve_url_or_path(side_module, cli_options.initial_cwd())?); } vec }; let output_path = resolve_compile_executable_output_path( &compile_flags, - ps.options.initial_cwd(), + cli_options.initial_cwd(), ) .await?; let graph = Arc::try_unwrap( - ps.module_graph_builder + module_graph_builder .create_graph_and_maybe_check(module_roots) .await?, ) @@ -55,7 +59,7 @@ pub async fn compile( // at the moment, we don't support npm specifiers in deno_compile, so show an error error_for_any_npm_specifier(&graph)?; - let parser = ps.parsed_source_cache.as_capturing_parser(); + let parser = parsed_source_cache.as_capturing_parser(); let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; log::info!( @@ -73,7 +77,7 @@ pub async fn compile( eszip, &module_specifier, &compile_flags, - &ps.options, + cli_options, ) .await .with_context(|| format!("Writing {}", output_path.display()))?; diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 5d34d39c75..6380d3822a 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -4,8 +4,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::TaskFlags; use crate::colors; +use crate::factory::CliFactory; use crate::npm::CliNpmResolver; -use crate::proc_state::ProcState; use crate::util::fs::canonicalize_path; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -26,9 +26,10 @@ pub async fn execute_script( flags: Flags, task_flags: TaskFlags, ) -> Result { - let ps = ProcState::from_flags(flags).await?; - let tasks_config = ps.options.resolve_tasks_config()?; - let maybe_package_json = ps.options.maybe_package_json(); + let factory = CliFactory::from_flags(flags).await?; + let cli_options = factory.cli_options(); + let tasks_config = cli_options.resolve_tasks_config()?; + let maybe_package_json = cli_options.maybe_package_json(); let package_json_scripts = 
maybe_package_json .as_ref() .and_then(|p| p.scripts.clone()) @@ -43,7 +44,7 @@ pub async fn execute_script( }; if let Some(script) = tasks_config.get(task_name) { - let config_file_url = ps.options.maybe_config_file_specifier().unwrap(); + let config_file_url = cli_options.maybe_config_file_specifier().unwrap(); let config_file_path = if config_file_url.scheme() == "file" { config_file_url.to_file_path().unwrap() } else { @@ -53,7 +54,7 @@ pub async fn execute_script( Some(path) => canonicalize_path(&PathBuf::from(path))?, None => config_file_path.parent().unwrap().to_owned(), }; - let script = get_script_with_args(script, &ps.options); + let script = get_script_with_args(script, cli_options); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; @@ -63,7 +64,12 @@ pub async fn execute_script( .await; Ok(exit_code) } else if let Some(script) = package_json_scripts.get(task_name) { - if let Some(package_deps) = ps.package_json_deps_installer.package_deps() { + let package_json_deps_installer = + factory.package_json_deps_installer().await?; + let npm_resolver = factory.npm_resolver().await?; + let node_resolver = factory.node_resolver().await?; + + if let Some(package_deps) = package_json_deps_installer.package_deps() { for (key, value) in package_deps { if let Err(err) = value { log::info!( @@ -75,13 +81,14 @@ pub async fn execute_script( } } } - ps.package_json_deps_installer + + package_json_deps_installer .ensure_top_level_install() .await?; - ps.npm_resolver.resolve_pending().await?; + npm_resolver.resolve_pending().await?; log::info!( - "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.", + "{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. 
This will be fixed in an upcoming release.", colors::yellow("Warning"), ); @@ -95,12 +102,11 @@ pub async fn execute_script( .unwrap() .to_owned(), }; - let script = get_script_with_args(script, &ps.options); + let script = get_script_with_args(script, cli_options); output_task(task_name, &script); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; - let npx_commands = - resolve_npm_commands(&ps.npm_resolver, &ps.node_resolver)?; + let npx_commands = resolve_npm_commands(npm_resolver, node_resolver)?; let env_vars = collect_env_vars(); let exit_code = deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands).await; diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 429bee71b9..847260352a 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -6,12 +6,12 @@ use crate::args::TestOptions; use crate::args::TypeCheckMode; use crate::colors; use crate::display; +use crate::factory::CliFactory; use crate::file_fetcher::File; use crate::file_fetcher::FileFetcher; use crate::graph_util::graph_valid_with_cli_options; use crate::module_loader::ModuleLoadPreparer; use crate::ops; -use crate::proc_state::ProcState; use crate::util::checksum; use crate::util::file_watcher; use crate::util::file_watcher::ResolutionResult; @@ -1629,16 +1629,19 @@ pub async fn run_tests( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let file_fetcher = factory.file_fetcher()?; + let module_load_preparer = factory.module_load_preparer().await?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let log_level = ps.options.log_level(); + Permissions::from_options(&cli_options.permissions_options())?; + let log_level = cli_options.log_level(); let specifiers_with_mode = fetch_specifiers_with_test_mode( - &ps.file_fetcher, + file_fetcher, &test_options.files, &test_options.doc, ) @@ -1649,9 +1652,9 @@ pub async fn run_tests( } check_specifiers( - &ps.options, - &ps.file_fetcher, - &ps.module_load_preparer, + cli_options, + file_fetcher, + module_load_preparer, specifiers_with_mode.clone(), ) .await?; @@ -1660,7 +1663,8 @@ pub async fn run_tests( return Ok(()); } - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + let worker_factory = + Arc::new(factory.create_cli_main_worker_factory().await?); test_specifiers( worker_factory, @@ -1692,22 +1696,27 @@ pub async fn run_tests_with_watch( cli_options: CliOptions, test_options: TestOptions, ) -> Result<(), AnyError> { - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); + let module_graph_builder = factory.module_graph_builder().await?; + let module_load_preparer = factory.module_load_preparer().await?; + let file_fetcher = factory.file_fetcher()?; + let file_watcher = factory.file_watcher()?; // Various test files should not share the same permissions in terms of // `PermissionsContainer` - otherwise granting/revoking permissions in one // file would have impact on other files, which is undesirable. 
let permissions = - Permissions::from_options(&ps.options.permissions_options())?; - let no_check = ps.options.type_check_mode() == TypeCheckMode::None; - let log_level = ps.options.log_level(); + Permissions::from_options(&cli_options.permissions_options())?; + let no_check = cli_options.type_check_mode() == TypeCheckMode::None; + let log_level = cli_options.log_level(); let resolver = |changed: Option>| { let paths_to_watch = test_options.files.include.clone(); let paths_to_watch_clone = paths_to_watch.clone(); let files_changed = changed.is_some(); let test_options = &test_options; - let cli_options = ps.options.clone(); - let module_graph_builder = ps.module_graph_builder.clone(); + let cli_options = cli_options.clone(); + let module_graph_builder = module_graph_builder.clone(); async move { let test_modules = if test_options.doc { @@ -1815,16 +1824,19 @@ pub async fn run_tests_with_watch( }) }; + let create_cli_main_worker_factory = + factory.create_cli_main_worker_factory_func().await?; let operation = |modules_to_reload: Vec| { let permissions = &permissions; let test_options = &test_options; - ps.reset_for_file_watcher(); - let cli_options = ps.options.clone(); - let file_fetcher = ps.file_fetcher.clone(); - let module_load_preparer = ps.module_load_preparer.clone(); - let worker_factory = Arc::new(ps.create_cli_main_worker_factory()); + file_watcher.reset(); + let cli_options = cli_options.clone(); + let file_fetcher = file_fetcher.clone(); + let module_load_preparer = module_load_preparer.clone(); + let create_cli_main_worker_factory = create_cli_main_worker_factory.clone(); async move { + let worker_factory = Arc::new(create_cli_main_worker_factory()); let specifiers_with_mode = fetch_specifiers_with_test_mode( &file_fetcher, &test_options.files, @@ -1887,7 +1899,7 @@ pub async fn run_tests_with_watch( } }); - let clear_screen = !ps.options.no_clear_screen(); + let clear_screen = !cli_options.no_clear_screen(); file_watcher::watch_func( resolver, 
operation, diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index f16923bf83..c76d36777f 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -5,8 +5,8 @@ use crate::args::Flags; use crate::args::UpgradeFlags; use crate::colors; +use crate::factory::CliFactory; use crate::http_util::HttpClient; -use crate::proc_state::ProcState; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::time; @@ -263,7 +263,8 @@ pub async fn upgrade( flags: Flags, upgrade_flags: UpgradeFlags, ) -> Result<(), AnyError> { - let ps = ProcState::from_flags(flags).await?; + let factory = CliFactory::from_flags(flags).await?; + let client = factory.http_client()?; let current_exe_path = std::env::current_exe()?; let metadata = fs::metadata(¤t_exe_path)?; let permissions = metadata.permissions(); @@ -285,8 +286,6 @@ pub async fn upgrade( ), current_exe_path.display()); } - let client = &ps.http_client; - let install_version = match upgrade_flags.version { Some(passed_version) => { let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$"); diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index 225c3e6a81..d478c2b57f 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -15,8 +15,8 @@ use crate::args::CliOptions; use crate::args::Flags; use crate::args::FmtOptionsConfig; use crate::args::VendorFlags; +use crate::factory::CliFactory; use crate::graph_util::ModuleGraphBuilder; -use crate::proc_state::ProcState; use crate::tools::fmt::format_json; use crate::util::fs::canonicalize_path; use crate::util::fs::resolve_from_cwd; @@ -43,19 +43,20 @@ pub async fn vendor( let output_dir = resolve_from_cwd(&raw_output_dir)?; validate_output_dir(&output_dir, &vendor_flags)?; validate_options(&mut cli_options, &output_dir)?; - let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?; + let factory = CliFactory::from_cli_options(Arc::new(cli_options)); + let cli_options = factory.cli_options(); let 
graph = create_graph( - &ps.module_graph_builder, + factory.module_graph_builder().await?, &vendor_flags, - ps.options.initial_cwd(), + cli_options.initial_cwd(), ) .await?; let vendored_count = build::build( graph, - &ps.parsed_source_cache, + factory.parsed_source_cache()?, &output_dir, - ps.maybe_import_map.as_deref(), - ps.lockfile.clone(), + factory.maybe_import_map().await?.as_deref(), + factory.maybe_lockfile().clone(), &build::RealVendorEnvironment, )?; @@ -71,7 +72,7 @@ pub async fn vendor( ); if vendored_count > 0 { let import_map_path = raw_output_dir.join("import_map.json"); - if maybe_update_config_file(&output_dir, &ps.options) { + if maybe_update_config_file(&output_dir, cli_options) { log::info!( concat!( "\nUpdated your local Deno configuration file with a reference to the ", diff --git a/cli/watcher.rs b/cli/watcher.rs new file mode 100644 index 0000000000..f9c2c1b42d --- /dev/null +++ b/cli/watcher.rs @@ -0,0 +1,99 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use crate::args::CliOptions; +use crate::cache::ParsedSourceCache; +use crate::graph_util::ModuleGraphContainer; +use crate::module_loader::CjsResolutionStore; + +use deno_core::parking_lot::Mutex; +use deno_core::ModuleSpecifier; + +use std::path::PathBuf; +use std::sync::Arc; + +pub struct FileWatcher { + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, +} + +impl FileWatcher { + pub fn new( + cli_options: Arc, + cjs_resolutions: Arc, + graph_container: Arc, + maybe_reporter: Option, + parsed_source_cache: Arc, + ) -> Self { + Self { + cli_options, + cjs_resolutions, + parsed_source_cache, + graph_container, + maybe_reporter, + } + } + /// Reset all runtime state to its default. This should be used on file + /// watcher restarts. 
+ pub fn reset(&self) { + self.cjs_resolutions.clear(); + self.parsed_source_cache.clear(); + self.graph_container.clear(); + + self.init_watcher(); + } + + // Add invariant files like the import map and explicit watch flag list to + // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher. + pub fn init_watcher(&self) { + let files_to_watch_sender = match &self.maybe_reporter { + Some(reporter) => &reporter.sender, + None => return, + }; + if let Some(watch_paths) = self.cli_options.watch_paths() { + files_to_watch_sender.send(watch_paths.clone()).unwrap(); + } + if let Ok(Some(import_map_path)) = self + .cli_options + .resolve_import_map_specifier() + .map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) + { + files_to_watch_sender.send(vec![import_map_path]).unwrap(); + } + } +} + +#[derive(Clone, Debug)] +pub struct FileWatcherReporter { + sender: tokio::sync::mpsc::UnboundedSender>, + file_paths: Arc>>, +} + +impl FileWatcherReporter { + pub fn new(sender: tokio::sync::mpsc::UnboundedSender>) -> Self { + Self { + sender, + file_paths: Default::default(), + } + } +} + +impl deno_graph::source::Reporter for FileWatcherReporter { + fn on_load( + &self, + specifier: &ModuleSpecifier, + modules_done: usize, + modules_total: usize, + ) { + let mut file_paths = self.file_paths.lock(); + if specifier.scheme() == "file" { + file_paths.push(specifier.to_file_path().unwrap()); + } + + if modules_done == modules_total { + self.sender.send(file_paths.drain(..).collect()).unwrap(); + } + } +} diff --git a/cli/worker.rs b/cli/worker.rs index 1beaa27baf..64ce284776 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -66,6 +66,7 @@ pub trait HasNodeSpecifierChecker: Send + Sync { fn has_node_specifier(&self) -> bool; } +#[derive(Clone)] pub struct CliMainWorkerOptions { pub argv: Vec, pub debug: bool, From 13c16d9cfd2d8ce0dd748cf67ef10cc0b9e7e78f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 1 May 2023 20:41:56 +0200 Subject: 
[PATCH 098/320] bench: fix json ops benchmark (#18941) --- core/examples/http_bench_json_ops/http_bench_json_ops.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/examples/http_bench_json_ops/http_bench_json_ops.js b/core/examples/http_bench_json_ops/http_bench_json_ops.js index beb6c90e45..493574fe7e 100644 --- a/core/examples/http_bench_json_ops/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops/http_bench_json_ops.js @@ -5,7 +5,7 @@ // deno-lint-ignore-file camelcase -const { op_listen } = Deno.core.ops; +const { op_listen } = Deno[Deno.internal].core.ops; const { op_accept, op_read_socket, From ecc70eb58fd5531f3b93402cf781e93ef2bb4d64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 1 May 2023 21:57:58 +0200 Subject: [PATCH 099/320] bench: fix benchmarks again (#18942) --- core/examples/http_bench_json_ops/http_bench_json_ops.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/examples/http_bench_json_ops/http_bench_json_ops.js b/core/examples/http_bench_json_ops/http_bench_json_ops.js index 493574fe7e..6cf2a8be24 100644 --- a/core/examples/http_bench_json_ops/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops/http_bench_json_ops.js @@ -5,11 +5,11 @@ // deno-lint-ignore-file camelcase -const { op_listen } = Deno[Deno.internal].core.ops; +const { op_listen } = Deno.core.ops; const { op_accept, op_read_socket, -} = core.generateAsyncOpHandler( +} = Deno.core.generateAsyncOpHandler( "op_accept", "op_read_socket", ); From 913176313b6869eeb29b8d48e0c8d80227fa6544 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 1 May 2023 16:42:05 -0400 Subject: [PATCH 100/320] perf: lazily create RootCertStore (#18938) --- cli/args/mod.rs | 73 ++++++++++++++++++++++++---- cli/factory.rs | 34 ++++++------- cli/file_fetcher.rs | 39 +++++++++------ cli/http_util.rs | 72 ++++++++++++++++++++------- cli/lsp/language_server.rs | 35 ++++++++----- cli/lsp/registries.rs | 32 
+++++------- cli/npm/cache.rs | 5 +- cli/npm/registry.rs | 4 +- cli/standalone/mod.rs | 35 +++++++++---- cli/tools/installer.rs | 2 +- cli/tools/run.rs | 2 +- cli/tools/standalone.rs | 2 +- cli/tools/upgrade.rs | 12 +++-- cli/worker.rs | 14 +++--- ext/fetch/lib.rs | 73 ++++++++++++++++++---------- ext/net/lib.rs | 17 +++++-- ext/net/ops_tls.rs | 9 +--- ext/tls/lib.rs | 8 +++ ext/websocket/lib.rs | 22 +++++++-- runtime/examples/hello_runtime.rs | 2 +- runtime/ops/web_worker/sync_fetch.rs | 3 +- runtime/web_worker.rs | 10 ++-- runtime/worker.rs | 12 ++--- 23 files changed, 344 insertions(+), 173 deletions(-) diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 00476dce1c..b5975536a1 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -13,6 +13,7 @@ use self::package_json::PackageJsonDeps; use ::import_map::ImportMap; use deno_core::resolve_url_or_path; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_runtime::deno_tls::RootCertStoreProvider; use deno_semver::npm::NpmPackageReqReference; use indexmap::IndexMap; @@ -52,6 +53,7 @@ use deno_runtime::deno_tls::webpki_roots; use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::PermissionsOptions; use once_cell::sync::Lazy; +use once_cell::sync::OnceCell; use std::collections::HashMap; use std::env; use std::io::BufReader; @@ -61,6 +63,7 @@ use std::num::NonZeroUsize; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; use crate::cache::DenoDir; use crate::file_fetcher::FileFetcher; @@ -401,13 +404,62 @@ fn discover_package_json( Ok(None) } +struct CliRootCertStoreProvider { + cell: OnceCell, + maybe_root_path: Option, + maybe_ca_stores: Option>, + maybe_ca_data: Option, +} + +impl CliRootCertStoreProvider { + pub fn new( + maybe_root_path: Option, + maybe_ca_stores: Option>, + maybe_ca_data: Option, + ) -> Self { + Self { + cell: Default::default(), + maybe_root_path, + maybe_ca_stores, + maybe_ca_data, + } + } +} + +impl 
RootCertStoreProvider for CliRootCertStoreProvider { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + self + .cell + .get_or_try_init(|| { + get_root_cert_store( + self.maybe_root_path.clone(), + self.maybe_ca_stores.clone(), + self.maybe_ca_data.clone(), + ) + }) + .map_err(|e| e.into()) + } +} + +#[derive(Error, Debug, Clone)] +pub enum RootCertStoreLoadError { + #[error( + "Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")" + )] + UnknownStore(String), + #[error("Unable to add pem file to certificate store: {0}")] + FailedAddPemFile(String), + #[error("Failed opening CA file: {0}")] + CaFileOpenError(String), +} + /// Create and populate a root cert store based on the passed options and /// environment. pub fn get_root_cert_store( maybe_root_path: Option, maybe_ca_stores: Option>, maybe_ca_data: Option, -) -> Result { +) -> Result { let mut root_cert_store = RootCertStore::empty(); let ca_stores: Vec = maybe_ca_stores .or_else(|| { @@ -444,7 +496,7 @@ pub fn get_root_cert_store( } } _ => { - return Err(anyhow!("Unknown certificate store \"{}\" specified (allowed: \"system,mozilla\")", store)); + return Err(RootCertStoreLoadError::UnknownStore(store.clone())); } } } @@ -459,7 +511,9 @@ pub fn get_root_cert_store( } else { PathBuf::from(ca_file) }; - let certfile = std::fs::File::open(ca_file)?; + let certfile = std::fs::File::open(ca_file).map_err(|err| { + RootCertStoreLoadError::CaFileOpenError(err.to_string()) + })?; let mut reader = BufReader::new(certfile); rustls_pemfile::certs(&mut reader) } @@ -474,10 +528,7 @@ pub fn get_root_cert_store( root_cert_store.add_parsable_certificates(&certs); } Err(e) => { - return Err(anyhow!( - "Unable to add pem file to certificate store: {}", - e - )); + return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string())); } } } @@ -799,12 +850,14 @@ impl CliOptions { .map(|path| ModuleSpecifier::from_directory_path(path).unwrap()) } - pub fn resolve_root_cert_store(&self) -> 
Result { - get_root_cert_store( + pub fn resolve_root_cert_store_provider( + &self, + ) -> Arc { + Arc::new(CliRootCertStoreProvider::new( None, self.flags.ca_stores.clone(), self.flags.ca_data.clone(), - ) + )) } pub fn resolve_ts_config_for_emit( diff --git a/cli/factory.rs b/cli/factory.rs index 69560cf544..73d0cb8ea9 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -45,7 +45,7 @@ use deno_core::parking_lot::Mutex; use deno_runtime::deno_node; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; use deno_semver::npm::NpmPackageReqReference; @@ -129,14 +129,14 @@ struct CliFactoryServices { dir: Deferred, caches: Deferred>, file_fetcher: Deferred>, - http_client: Deferred, + http_client: Deferred>, emit_cache: Deferred, emitter: Deferred>, graph_container: Deferred>, lockfile: Deferred>>>, maybe_import_map: Deferred>>, maybe_inspector_server: Deferred>>, - root_cert_store: Deferred, + root_cert_store_provider: Deferred>, blob_store: Deferred, parsed_source_cache: Deferred>, resolver: Deferred>, @@ -208,11 +208,11 @@ impl CliFactory { self.services.blob_store.get_or_init(BlobStore::default) } - pub fn root_cert_store(&self) -> Result<&RootCertStore, AnyError> { + pub fn root_cert_store_provider(&self) -> &Arc { self .services - .root_cert_store - .get_or_try_init(|| self.options.resolve_root_cert_store()) + .root_cert_store_provider + .get_or_init(|| self.options.resolve_root_cert_store_provider()) } pub fn text_only_progress_bar(&self) -> &ProgressBar { @@ -222,12 +222,12 @@ impl CliFactory { .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) } - pub fn http_client(&self) -> Result<&HttpClient, AnyError> { - self.services.http_client.get_or_try_init(|| { - HttpClient::new( - 
Some(self.root_cert_store()?.clone()), + pub fn http_client(&self) -> &Arc { + self.services.http_client.get_or_init(|| { + Arc::new(HttpClient::new( + Some(self.root_cert_store_provider().clone()), self.options.unsafely_ignore_certificate_errors().clone(), - ) + )) }) } @@ -237,7 +237,7 @@ impl CliFactory { HttpCache::new(&self.deno_dir()?.deps_folder_path()), self.options.cache_setting(), !self.options.no_remote(), - self.http_client()?.clone(), + self.http_client().clone(), self.blob_store().clone(), Some(self.text_only_progress_bar().clone()), ))) @@ -256,7 +256,7 @@ impl CliFactory { Ok(Arc::new(NpmCache::new( self.deno_dir()?.npm_folder_path(), self.options.cache_setting(), - self.http_client()?.clone(), + self.http_client().clone(), self.text_only_progress_bar().clone(), ))) }) @@ -267,7 +267,7 @@ impl CliFactory { Ok(Arc::new(CliNpmRegistryApi::new( CliNpmRegistryApi::default_url().to_owned(), self.npm_cache()?.clone(), - self.http_client()?.clone(), + self.http_client().clone(), self.text_only_progress_bar().clone(), ))) }) @@ -554,7 +554,7 @@ impl CliFactory { let options = self.cli_options().clone(); let main_worker_options = self.create_cli_main_worker_options()?; let node_fs = self.node_fs().clone(); - let root_cert_store = self.root_cert_store()?.clone(); + let root_cert_store_provider = self.root_cert_store_provider().clone(); let node_resolver = self.node_resolver().await?.clone(); let npm_resolver = self.npm_resolver().await?.clone(); let maybe_inspector_server = self.maybe_inspector_server().clone(); @@ -578,7 +578,7 @@ impl CliFactory { node_resolver.clone(), ), )), - root_cert_store.clone(), + root_cert_store_provider.clone(), node_fs.clone(), maybe_inspector_server.clone(), main_worker_options.clone(), @@ -609,7 +609,7 @@ impl CliFactory { node_resolver.clone(), ), )), - self.root_cert_store()?.clone(), + self.root_cert_store_provider().clone(), self.node_fs().clone(), self.maybe_inspector_server().clone(), 
self.create_cli_main_worker_options()?, diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 38b96c72de..fd8c0f7939 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -178,7 +178,7 @@ pub struct FileFetcher { cache: FileCache, cache_setting: CacheSetting, pub http_cache: HttpCache, - http_client: HttpClient, + http_client: Arc, blob_store: BlobStore, download_log_level: log::Level, progress_bar: Option, @@ -189,7 +189,7 @@ impl FileFetcher { http_cache: HttpCache, cache_setting: CacheSetting, allow_remote: bool, - http_client: HttpClient, + http_client: Arc, blob_store: BlobStore, progress_bar: Option, ) -> Self { @@ -660,7 +660,7 @@ async fn fetch_once<'a>( http_client: &HttpClient, args: FetchOnceArgs<'a>, ) -> Result { - let mut request = http_client.get_no_redirect(args.url.clone()); + let mut request = http_client.get_no_redirect(args.url.clone())?; if let Some(etag) = args.maybe_etag { let if_none_match_val = HeaderValue::from_str(&etag)?; @@ -769,7 +769,7 @@ mod tests { HttpCache::new(&location), cache_setting, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), blob_store.clone(), None, ); @@ -1207,7 +1207,7 @@ mod tests { HttpCache::new(&location), CacheSetting::ReloadAll, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1232,7 +1232,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Use, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1257,7 +1257,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Use, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1398,7 +1398,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Use, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1426,7 +1426,7 @@ mod tests 
{ HttpCache::new(&location), CacheSetting::Use, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1525,7 +1525,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Use, false, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1550,7 +1550,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Only, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -1558,7 +1558,7 @@ mod tests { HttpCache::new(&location), CacheSetting::Use, true, - HttpClient::new(None, None).unwrap(), + Arc::new(HttpClient::new(None, None)), BlobStore::default(), None, ); @@ -2021,15 +2021,24 @@ mod tests { #[ignore] // https://github.com/denoland/deno/issues/12561 async fn test_fetch_with_empty_certificate_store() { use deno_runtime::deno_tls::rustls::RootCertStore; + use deno_runtime::deno_tls::RootCertStoreProvider; + + struct ValueRootCertStoreProvider(RootCertStore); + + impl RootCertStoreProvider for ValueRootCertStoreProvider { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + Ok(&self.0) + } + } let _http_server_guard = test_util::http_server(); // Relies on external http server with a valid mozilla root CA cert. 
let url = Url::parse("https://deno.land").unwrap(); let client = HttpClient::new( - Some(RootCertStore::empty()), // no certs loaded at all + // no certs loaded at all + Some(Arc::new(ValueRootCertStoreProvider(RootCertStore::empty()))), None, - ) - .unwrap(); + ); let result = fetch_once( &client, diff --git a/cli/http_util.rs b/cli/http_util.rs index b01732ca97..7c17e8e1e5 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -15,8 +15,9 @@ use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::reqwest; use deno_runtime::deno_fetch::reqwest::header::LOCATION; use deno_runtime::deno_fetch::reqwest::Response; -use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; use std::collections::HashMap; +use std::sync::Arc; use std::time::Duration; use std::time::SystemTime; @@ -217,34 +218,68 @@ impl CacheSemantics { } } -#[derive(Debug, Clone)] -pub struct HttpClient(reqwest::Client); +pub struct HttpClient { + root_cert_store_provider: Option>, + unsafely_ignore_certificate_errors: Option>, + cell: once_cell::sync::OnceCell, +} + +impl std::fmt::Debug for HttpClient { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("HttpClient") + .field( + "unsafely_ignore_certificate_errors", + &self.unsafely_ignore_certificate_errors, + ) + .finish() + } +} impl HttpClient { pub fn new( - root_cert_store: Option, + root_cert_store_provider: Option>, unsafely_ignore_certificate_errors: Option>, - ) -> Result { - Ok(HttpClient::from_client(create_http_client( - get_user_agent(), - root_cert_store, - vec![], - None, + ) -> Self { + Self { + root_cert_store_provider, unsafely_ignore_certificate_errors, - None, - )?)) + cell: Default::default(), + } } + #[cfg(test)] pub fn from_client(client: reqwest::Client) -> Self { - Self(client) + let result = Self { + root_cert_store_provider: Default::default(), + unsafely_ignore_certificate_errors: Default::default(), + cell: 
Default::default(), + }; + result.cell.set(client).unwrap(); + result + } + + fn client(&self) -> Result<&reqwest::Client, AnyError> { + self.cell.get_or_try_init(|| { + create_http_client( + get_user_agent(), + match &self.root_cert_store_provider { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }, + vec![], + None, + self.unsafely_ignore_certificate_errors.clone(), + None, + ) + }) } /// Do a GET request without following redirects. pub fn get_no_redirect( &self, url: U, - ) -> reqwest::RequestBuilder { - self.0.get(url) + ) -> Result { + Ok(self.client()?.get(url)) } pub async fn download_text( @@ -306,12 +341,13 @@ impl HttpClient { url: U, ) -> Result { let mut url = url.into_url()?; - let mut response = self.get_no_redirect(url.clone()).send().await?; + let mut response = self.get_no_redirect(url.clone())?.send().await?; let status = response.status(); if status.is_redirection() { for _ in 0..5 { let new_url = resolve_redirect_from_response(&url, &response)?; - let new_response = self.get_no_redirect(new_url.clone()).send().await?; + let new_response = + self.get_no_redirect(new_url.clone())?.send().await?; let status = new_response.status(); if status.is_redirection() { response = new_response; @@ -357,7 +393,7 @@ mod test { #[tokio::test] async fn test_http_client_download_redirect() { let _http_server_guard = test_util::http_server(); - let client = HttpClient::new(None, None).unwrap(); + let client = HttpClient::new(None, None); // make a request to the redirect server let text = client diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index d49a2559ca..83657a8ef4 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -12,6 +12,8 @@ use deno_core::ModuleSpecifier; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; use 
deno_runtime::deno_web::BlobStore; use import_map::ImportMap; use log::error; @@ -93,6 +95,14 @@ use crate::util::path::specifier_to_file_path; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +struct LspRootCertStoreProvider(RootCertStore); + +impl RootCertStoreProvider for LspRootCertStoreProvider { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + Ok(&self.0) + } +} + #[derive(Debug, Clone)] pub struct LanguageServer(Arc>); @@ -124,7 +134,7 @@ pub struct Inner { /// The collection of documents that the server is currently handling, either /// on disk or "open" within the client. pub documents: Documents, - http_client: HttpClient, + http_client: Arc, /// Handles module registries, which allow discovery of modules module_registries: ModuleRegistry, /// The path to the module registries cache @@ -420,7 +430,7 @@ impl LanguageServer { fn create_lsp_structs( dir: &DenoDir, - http_client: HttpClient, + http_client: Arc, ) -> ( Arc, Arc, @@ -469,10 +479,9 @@ impl Inner { let dir = DenoDir::new(maybe_custom_root).expect("could not access DENO_DIR"); let module_registries_location = dir.registries_folder_path(); - let http_client = HttpClient::new(None, None).unwrap(); + let http_client = Arc::new(HttpClient::new(None, None)); let module_registries = - ModuleRegistry::new(&module_registries_location, http_client.clone()) - .unwrap(); + ModuleRegistry::new(&module_registries_location, http_client.clone()); let location = dir.deps_folder_path(); let documents = Documents::new(&location, client.kind()); let deps_http_cache = HttpCache::new(&location); @@ -775,20 +784,22 @@ impl Inner { .root_uri .as_ref() .and_then(|uri| specifier_to_file_path(uri).ok()); - let root_cert_store = Some(get_root_cert_store( + let root_cert_store = get_root_cert_store( maybe_root_path, workspace_settings.certificate_stores, workspace_settings.tls_certificate.map(CaData::File), - )?); - let module_registries_location = 
dir.registries_folder_path(); - self.http_client = HttpClient::new( - root_cert_store, - workspace_settings.unsafely_ignore_certificate_errors, )?; + let root_cert_store_provider = + Arc::new(LspRootCertStoreProvider(root_cert_store)); + let module_registries_location = dir.registries_folder_path(); + self.http_client = Arc::new(HttpClient::new( + Some(root_cert_store_provider), + workspace_settings.unsafely_ignore_certificate_errors, + )); self.module_registries = ModuleRegistry::new( &module_registries_location, self.http_client.clone(), - )?; + ); self.module_registries_location = module_registries_location; ( self.npm_api, diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index f46aba44f2..b2f9bee2c3 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -35,6 +35,7 @@ use log::error; use once_cell::sync::Lazy; use std::collections::HashMap; use std::path::Path; +use std::sync::Arc; use tower_lsp::lsp_types as lsp; const CONFIG_PATH: &str = "/.well-known/deno-import-intellisense.json"; @@ -425,16 +426,13 @@ impl Default for ModuleRegistry { // custom root. 
let dir = DenoDir::new(None).unwrap(); let location = dir.registries_folder_path(); - let http_client = HttpClient::new(None, None).unwrap(); - Self::new(&location, http_client).unwrap() + let http_client = Arc::new(HttpClient::new(None, None)); + Self::new(&location, http_client) } } impl ModuleRegistry { - pub fn new( - location: &Path, - http_client: HttpClient, - ) -> Result { + pub fn new(location: &Path, http_client: Arc) -> Self { let http_cache = HttpCache::new(location); let mut file_fetcher = FileFetcher::new( http_cache, @@ -446,10 +444,10 @@ impl ModuleRegistry { ); file_fetcher.set_download_log_level(super::logging::lsp_log_level()); - Ok(Self { + Self { origins: HashMap::new(), file_fetcher, - }) + } } fn complete_literal( @@ -1251,8 +1249,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let mut module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); module_registry .enable("http://localhost:4545/") .await @@ -1313,8 +1310,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let mut module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); module_registry .enable("http://localhost:4545/") .await @@ -1537,8 +1533,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let mut module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); module_registry .enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-key-first.json") .await @@ -1608,8 +1603,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let mut 
module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); module_registry .enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-complex.json") .await @@ -1660,8 +1654,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); let result = module_registry.check_origin("http://localhost:4545").await; assert!(result.is_ok()); } @@ -1672,8 +1665,7 @@ mod tests { let temp_dir = TempDir::new(); let location = temp_dir.path().join("registries"); let module_registry = - ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap()) - .unwrap(); + ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None))); let result = module_registry.check_origin("https://example.com").await; assert!(result.is_err()); let err = result.unwrap_err().to_string(); diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index 0d88109de3..cda40fd172 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -4,6 +4,7 @@ use std::collections::HashSet; use std::fs; use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; @@ -295,7 +296,7 @@ impl ReadonlyNpmCache { pub struct NpmCache { readonly: ReadonlyNpmCache, cache_setting: CacheSetting, - http_client: HttpClient, + http_client: Arc, progress_bar: ProgressBar, /// ensures a package is only downloaded once per run previously_reloaded_packages: Mutex>, @@ -305,7 +306,7 @@ impl NpmCache { pub fn new( cache_dir_path: PathBuf, cache_setting: CacheSetting, - http_client: HttpClient, + http_client: Arc, progress_bar: ProgressBar, ) -> Self { Self { diff --git a/cli/npm/registry.rs b/cli/npm/registry.rs index 
ef050a7346..40d7f62191 100644 --- a/cli/npm/registry.rs +++ b/cli/npm/registry.rs @@ -63,7 +63,7 @@ impl CliNpmRegistryApi { pub fn new( base_url: Url, cache: Arc, - http_client: HttpClient, + http_client: Arc, progress_bar: ProgressBar, ) -> Self { Self(Some(Arc::new(CliNpmRegistryApiInner { @@ -172,7 +172,7 @@ struct CliNpmRegistryApiInner { force_reload_flag: AtomicFlag, mem_cache: Mutex>, previously_reloaded_packages: Mutex>, - http_client: HttpClient, + http_client: Arc, progress_bar: ProgressBar, } diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 556346535b..2ef21d417e 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -32,6 +32,8 @@ use deno_core::ResolutionKind; use deno_graph::source::Resolver; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; @@ -161,22 +163,37 @@ impl HasNodeSpecifierChecker for StandaloneHasNodeSpecifierChecker { } } +struct StandaloneRootCertStoreProvider { + ca_stores: Option>, + ca_data: Option, + cell: once_cell::sync::OnceCell, +} + +impl RootCertStoreProvider for StandaloneRootCertStoreProvider { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + self.cell.get_or_try_init(|| { + get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone()) + .map_err(|err| err.into()) + }) + } +} + pub async fn run( eszip: eszip::EszipV2, metadata: Metadata, ) -> Result<(), AnyError> { let main_module = &metadata.entrypoint; let dir = DenoDir::new(None)?; - let root_cert_store = get_root_cert_store( - None, - metadata.ca_stores, - metadata.ca_data.map(CaData::Bytes), - )?; + let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { + ca_stores: metadata.ca_stores, + ca_data: metadata.ca_data.map(CaData::Bytes), + 
cell: Default::default(), + }); let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly); - let http_client = HttpClient::new( - Some(root_cert_store.clone()), + let http_client = Arc::new(HttpClient::new( + Some(root_cert_store_provider.clone()), metadata.unsafely_ignore_certificate_errors.clone(), - )?; + )); let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); let npm_cache = Arc::new(NpmCache::new( dir.npm_folder_path(), @@ -235,7 +252,7 @@ pub async fn run( Box::new(StandaloneHasNodeSpecifierChecker), BlobStore::default(), Box::new(module_loader_factory), - root_cert_store, + root_cert_store_provider, node_fs, None, CliMainWorkerOptions { diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index fb83c3cab9..07606d5f8d 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -133,7 +133,7 @@ pub async fn infer_name_from_url(url: &Url) -> Option { let mut url = url.clone(); if url.path() == "/" { - let client = HttpClient::new(None, None).unwrap(); + let client = HttpClient::new(None, None); if let Ok(res) = client.get_redirected_response(url.clone()).await { url = res.url().clone(); } diff --git a/cli/tools/run.rs b/cli/tools/run.rs index c6e706285b..99312d5b90 100644 --- a/cli/tools/run.rs +++ b/cli/tools/run.rs @@ -35,7 +35,7 @@ To grant permissions, set them before the script argument. For example: // map specified and bare specifier is used on the command line let factory = CliFactory::from_flags(flags).await?; let deno_dir = factory.deno_dir()?; - let http_client = factory.http_client()?; + let http_client = factory.http_client(); let cli_options = factory.cli_options(); // Run a background task that checks for available upgrades. 
If an earlier diff --git a/cli/tools/standalone.rs b/cli/tools/standalone.rs index 0e8d9ca733..d34e5da833 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/standalone.rs @@ -26,7 +26,7 @@ pub async fn compile( let factory = CliFactory::from_flags(flags).await?; let cli_options = factory.cli_options(); let file_fetcher = factory.file_fetcher()?; - let http_client = factory.http_client()?; + let http_client = factory.http_client(); let deno_dir = factory.deno_dir()?; let module_graph_builder = factory.module_graph_builder().await?; let parsed_source_cache = factory.parsed_source_cache()?; diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index c76d36777f..b5aefe4798 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -26,6 +26,7 @@ use std::ops::Sub; use std::path::Path; use std::path::PathBuf; use std::process::Command; +use std::sync::Arc; use std::time::Duration; static ARCHIVE_NAME: Lazy = @@ -50,13 +51,13 @@ trait UpdateCheckerEnvironment: Clone + Send + Sync { #[derive(Clone)] struct RealUpdateCheckerEnvironment { - http_client: HttpClient, + http_client: Arc, cache_file_path: PathBuf, current_time: chrono::DateTime, } impl RealUpdateCheckerEnvironment { - pub fn new(http_client: HttpClient, cache_file_path: PathBuf) -> Self { + pub fn new(http_client: Arc, cache_file_path: PathBuf) -> Self { Self { http_client, cache_file_path, @@ -183,7 +184,10 @@ fn print_release_notes(current_version: &str, new_version: &str) { } } -pub fn check_for_upgrades(http_client: HttpClient, cache_file_path: PathBuf) { +pub fn check_for_upgrades( + http_client: Arc, + cache_file_path: PathBuf, +) { if env::var("DENO_NO_UPDATE_CHECK").is_ok() { return; } @@ -264,7 +268,7 @@ pub async fn upgrade( upgrade_flags: UpgradeFlags, ) -> Result<(), AnyError> { let factory = CliFactory::from_flags(flags).await?; - let client = factory.http_client()?; + let client = factory.http_client(); let current_exe_path = std::env::current_exe()?; let metadata = 
fs::metadata(¤t_exe_path)?; let permissions = metadata.permissions(); diff --git a/cli/worker.rs b/cli/worker.rs index 64ce284776..ae8822fe40 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -21,7 +21,7 @@ use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::inspector_server::InspectorServer; @@ -96,7 +96,7 @@ struct SharedWorkerState { shared_array_buffer_store: SharedArrayBufferStore, compiled_wasm_module_store: CompiledWasmModuleStore, module_loader_factory: Box, - root_cert_store: RootCertStore, + root_cert_store_provider: Arc, node_fs: Arc, maybe_inspector_server: Option>, } @@ -307,7 +307,7 @@ impl CliMainWorkerFactory { has_node_specifier_checker: Box, blob_store: BlobStore, module_loader_factory: Box, - root_cert_store: RootCertStore, + root_cert_store_provider: Arc, node_fs: Arc, maybe_inspector_server: Option>, options: CliMainWorkerOptions, @@ -324,7 +324,7 @@ impl CliMainWorkerFactory { shared_array_buffer_store: Default::default(), compiled_wasm_module_store: Default::default(), module_loader_factory, - root_cert_store, + root_cert_store_provider, node_fs, maybe_inspector_server, }), @@ -434,7 +434,7 @@ impl CliMainWorkerFactory { .options .unsafely_ignore_certificate_errors .clone(), - root_cert_store: Some(shared.root_cert_store.clone()), + root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), seed: shared.options.seed, source_map_getter: maybe_source_map_getter, format_js_error_fn: Some(Arc::new(format_js_error)), @@ -562,7 +562,7 @@ fn create_web_worker_callback( .options .unsafely_ignore_certificate_errors .clone(), - root_cert_store: Some(shared.root_cert_store.clone()), + 
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), seed: shared.options.seed, create_web_worker_cb, preload_module_cb, @@ -616,7 +616,7 @@ mod tests { extensions: vec![], startup_snapshot: Some(crate::js::deno_isolate_init()), unsafely_ignore_certificate_errors: None, - root_cert_store: None, + root_cert_store_provider: None, seed: None, format_js_error_fn: None, source_map_getter: None, diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 17f30d8ed3..51688a6fcf 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -3,7 +3,16 @@ mod byte_stream; mod fs_fetch_handler; -use data_url::DataUrl; +use std::borrow::Cow; +use std::cell::RefCell; +use std::cmp::min; +use std::convert::From; +use std::path::Path; +use std::path::PathBuf; +use std::pin::Pin; +use std::rc::Rc; +use std::sync::Arc; + use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::stream::Peekable; @@ -29,6 +38,9 @@ use deno_core::ResourceId; use deno_core::ZeroCopyBuf; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; +use deno_tls::RootCertStoreProvider; + +use data_url::DataUrl; use http::header::CONTENT_LENGTH; use http::Uri; use reqwest::header::HeaderMap; @@ -46,14 +58,6 @@ use reqwest::RequestBuilder; use reqwest::Response; use serde::Deserialize; use serde::Serialize; -use std::borrow::Cow; -use std::cell::RefCell; -use std::cmp::min; -use std::convert::From; -use std::path::Path; -use std::path::PathBuf; -use std::pin::Pin; -use std::rc::Rc; use tokio::sync::mpsc; // Re-export reqwest and data_url @@ -67,7 +71,7 @@ use crate::byte_stream::MpscByteStream; #[derive(Clone)] pub struct Options { pub user_agent: String, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub proxy: Option, pub request_builder_hook: Option Result>, @@ -76,11 +80,20 @@ pub struct Options { pub file_fetch_handler: Rc, } +impl Options { + pub fn root_cert_store(&self) -> Result, AnyError> { + Ok(match &self.root_cert_store_provider { 
+ Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } +} + impl Default for Options { fn default() -> Self { Self { user_agent: "".to_string(), - root_cert_store: None, + root_cert_store_provider: None, proxy: None, request_builder_hook: None, unsafely_ignore_certificate_errors: None, @@ -111,18 +124,7 @@ deno_core::extension!(deno_fetch, options: Options, }, state = |state, options| { - state.put::(options.options.clone()); - state.put::({ - create_http_client( - &options.options.user_agent, - options.options.root_cert_store, - vec![], - options.options.proxy, - options.options.unsafely_ignore_certificate_errors, - options.options.client_cert_chain_and_key - ) - .unwrap() - }); + state.put::(options.options); }, ); @@ -189,6 +191,26 @@ pub struct FetchReturn { cancel_handle_rid: Option, } +pub fn get_or_create_client_from_state( + state: &mut OpState, +) -> Result { + if let Some(client) = state.try_borrow::() { + Ok(client.clone()) + } else { + let options = state.borrow::(); + let client = create_http_client( + &options.user_agent, + options.root_cert_store()?, + vec![], + options.proxy.clone(), + options.unsafely_ignore_certificate_errors.clone(), + options.client_cert_chain_and_key.clone(), + )?; + state.put::(client.clone()); + Ok(client) + } +} + #[op] pub fn op_fetch( state: &mut OpState, @@ -207,8 +229,7 @@ where let r = state.resource_table.get::(rid)?; r.client.clone() } else { - let client = state.borrow::(); - client.clone() + get_or_create_client_from_state(state)? 
}; let method = Method::from_bytes(&method)?; @@ -632,7 +653,7 @@ where let client = create_http_client( &options.user_agent, - options.root_cert_store.clone(), + options.root_cert_store()?, ca_certs, args.proxy, options.unsafely_ignore_certificate_errors.clone(), diff --git a/ext/net/lib.rs b/ext/net/lib.rs index ff67186b0c..912b0723ea 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -11,10 +11,12 @@ pub mod resolve_addr; use deno_core::error::AnyError; use deno_core::OpState; use deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; pub trait NetPermissions { fn check_net>( @@ -67,7 +69,16 @@ pub fn get_declaration() -> PathBuf { #[derive(Clone)] pub struct DefaultTlsOptions { - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, +} + +impl DefaultTlsOptions { + pub fn root_cert_store(&self) -> Result, AnyError> { + Ok(match &self.root_cert_store_provider { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } } /// `UnsafelyIgnoreCertificateErrors` is a wrapper struct so it can be placed inside `GothamState`; @@ -113,13 +124,13 @@ deno_core::extension!(deno_net, ], esm = [ "01_net.js", "02_tls.js" ], options = { - root_cert_store: Option, + root_cert_store_provider: Option>, unstable: bool, unsafely_ignore_certificate_errors: Option>, }, state = |state, options| { state.put(DefaultTlsOptions { - root_cert_store: options.root_cert_store, + root_cert_store_provider: options.root_cert_store_provider, }); state.put(UnstableChecker { unstable: options.unstable }); state.put(UnsafelyIgnoreCertificateErrors( diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index 8a77570668..b9b37b3282 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -813,14 +813,10 @@ where .try_borrow::() .and_then(|it| it.0.clone()); - // TODO(@justinmchase): Ideally the certificate store is created once - // 
and not cloned. The store should be wrapped in Arc to reduce - // copying memory unnecessarily. let root_cert_store = state .borrow() .borrow::() - .root_cert_store - .clone(); + .root_cert_store()?; let resource_rc = state .borrow_mut() @@ -912,8 +908,7 @@ where let root_cert_store = state .borrow() .borrow::() - .root_cert_store - .clone(); + .root_cert_store()?; let hostname_dns = ServerName::try_from(&*addr.hostname) .map_err(|_| invalid_hostname(&addr.hostname))?; let connect_addr = resolve_addr(&addr.hostname, addr.port) diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index 123d35acf0..3034e2ae98 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -34,6 +34,14 @@ use std::io::Cursor; use std::sync::Arc; use std::time::SystemTime; +/// Lazily resolves the root cert store. +/// +/// This was done because the root cert store is not needed in all cases +/// and takes a bit of time to initialize. +pub trait RootCertStoreProvider: Send + Sync { + fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError>; +} + // This extension has no runtime apis, it only exports some shared native functions. 
deno_core::extension!(deno_tls); diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 2ce141fc92..e03a13789f 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -19,6 +19,7 @@ use deno_core::ZeroCopyBuf; use deno_net::raw::take_network_stream_resource; use deno_net::raw::NetworkStream; use deno_tls::create_client_config; +use deno_tls::RootCertStoreProvider; use http::header::CONNECTION; use http::header::UPGRADE; use http::HeaderName; @@ -54,7 +55,17 @@ use fastwebsockets::WebSocket; mod stream; #[derive(Clone)] -pub struct WsRootStore(pub Option); +pub struct WsRootStoreProvider(Option>); + +impl WsRootStoreProvider { + pub fn get_or_try_init(&self) -> Result, AnyError> { + Ok(match &self.0 { + Some(provider) => Some(provider.get_or_try_init()?.clone()), + None => None, + }) + } +} + #[derive(Clone)] pub struct WsUserAgent(pub String); @@ -181,7 +192,10 @@ where .borrow() .try_borrow::() .and_then(|it| it.0.clone()); - let root_cert_store = state.borrow().borrow::().0.clone(); + let root_cert_store = state + .borrow() + .borrow::() + .get_or_try_init()?; let user_agent = state.borrow().borrow::().0.clone(); let uri: Uri = url.parse()?; let mut request = Request::builder().method(Method::GET).uri( @@ -525,7 +539,7 @@ deno_core::extension!(deno_websocket, esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { user_agent: String, - root_cert_store: Option, + root_cert_store_provider: Option>, unsafely_ignore_certificate_errors: Option> }, state = |state, options| { @@ -533,7 +547,7 @@ deno_core::extension!(deno_websocket, state.put(UnsafelyIgnoreCertificateErrors( options.unsafely_ignore_certificate_errors, )); - state.put::(WsRootStore(options.root_cert_store)); + state.put::(WsRootStoreProvider(options.root_cert_store_provider)); }, ); diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs index f97c045b26..2e930a03f2 100644 --- a/runtime/examples/hello_runtime.rs +++ b/runtime/examples/hello_runtime.rs 
@@ -32,7 +32,7 @@ async fn main() -> Result<(), AnyError> { extensions: vec![], startup_snapshot: None, unsafely_ignore_certificate_errors: None, - root_cert_store: None, + root_cert_store_provider: None, seed: None, source_map_getter: None, format_js_error_fn: None, diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs index 2049d5ab85..ba5f325d63 100644 --- a/runtime/ops/web_worker/sync_fetch.rs +++ b/runtime/ops/web_worker/sync_fetch.rs @@ -8,7 +8,6 @@ use deno_core::op; use deno_core::url::Url; use deno_core::OpState; use deno_fetch::data_url::DataUrl; -use deno_fetch::reqwest; use deno_web::BlobStore; use deno_websocket::DomExceptionNetworkError; use hyper::body::Bytes; @@ -41,7 +40,7 @@ pub fn op_worker_sync_fetch( let handle = state.borrow::().clone(); assert_eq!(handle.worker_type, WebWorkerType::Classic); - let client = state.borrow::().clone(); + let client = deno_fetch::get_or_create_client_from_state(state)?; // TODO(andreubotella) It's not good to throw an exception related to blob // URLs when none of the script URLs use the blob scheme. 
diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 9bc5ba011f..b688aae8b3 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -37,7 +37,7 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use deno_web::create_entangled_message_port; use deno_web::BlobStore; use deno_web::MessagePort; @@ -329,7 +329,7 @@ pub struct WebWorkerOptions { pub extensions: Vec, pub startup_snapshot: Option, pub unsafely_ignore_certificate_errors: Option>, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub seed: Option, pub module_loader: Rc, pub node_fs: Option>, @@ -407,7 +407,7 @@ impl WebWorker { deno_fetch::deno_fetch::init_ops::( deno_fetch::Options { user_agent: options.bootstrap.user_agent.clone(), - root_cert_store: options.root_cert_store.clone(), + root_cert_store_provider: options.root_cert_store_provider.clone(), unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -418,7 +418,7 @@ impl WebWorker { deno_cache::deno_cache::init_ops::(create_cache), deno_websocket::deno_websocket::init_ops::( options.bootstrap.user_agent.clone(), - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), options.unsafely_ignore_certificate_errors.clone(), ), deno_webstorage::deno_webstorage::init_ops(None).disable(), @@ -429,7 +429,7 @@ impl WebWorker { ), deno_ffi::deno_ffi::init_ops::(unstable), deno_net::deno_net::init_ops::( - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), unstable, options.unsafely_ignore_certificate_errors.clone(), ), diff --git a/runtime/worker.rs b/runtime/worker.rs index 56684e9925..0d68a4b51e 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -33,7 +33,7 @@ use deno_core::SourceMapGetter; use deno_fs::StdFs; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; -use 
deno_tls::rustls::RootCertStore; +use deno_tls::RootCertStoreProvider; use deno_web::BlobStore; use log::debug; @@ -84,7 +84,7 @@ pub struct WorkerOptions { /// V8 snapshot that should be loaded on startup. pub startup_snapshot: Option, pub unsafely_ignore_certificate_errors: Option>, - pub root_cert_store: Option, + pub root_cert_store_provider: Option>, pub seed: Option, /// Implementation of `ModuleLoader` which will be @@ -163,7 +163,7 @@ impl Default for WorkerOptions { cache_storage_dir: Default::default(), broadcast_channel: Default::default(), source_map_getter: Default::default(), - root_cert_store: Default::default(), + root_cert_store_provider: Default::default(), node_fs: Default::default(), npm_resolver: Default::default(), blob_store: Default::default(), @@ -228,7 +228,7 @@ impl MainWorker { deno_fetch::deno_fetch::init_ops::( deno_fetch::Options { user_agent: options.bootstrap.user_agent.clone(), - root_cert_store: options.root_cert_store.clone(), + root_cert_store_provider: options.root_cert_store_provider.clone(), unsafely_ignore_certificate_errors: options .unsafely_ignore_certificate_errors .clone(), @@ -239,7 +239,7 @@ impl MainWorker { deno_cache::deno_cache::init_ops::(create_cache), deno_websocket::deno_websocket::init_ops::( options.bootstrap.user_agent.clone(), - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), options.unsafely_ignore_certificate_errors.clone(), ), deno_webstorage::deno_webstorage::init_ops( @@ -252,7 +252,7 @@ impl MainWorker { ), deno_ffi::deno_ffi::init_ops::(unstable), deno_net::deno_net::init_ops::( - options.root_cert_store.clone(), + options.root_cert_store_provider.clone(), unstable, options.unsafely_ignore_certificate_errors.clone(), ), From 2ee55145c06e5986c96f9eb40653464e81d79413 Mon Sep 17 00:00:00 2001 From: Michael Lazarev Date: Tue, 2 May 2023 00:52:56 +0300 Subject: [PATCH 101/320] docs: correct example of piping the output of a subprocess to a file (#18933) Fixes #18909 --- 
cli/tsc/dts/lib.deno.ns.d.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 74d3ffb0b4..90a12ad420 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -4003,11 +4003,14 @@ declare namespace Deno { * "console.log('Hello World')", * ], * stdin: "piped", + * stdout: "piped", * }); * const child = command.spawn(); * * // open a file and pipe the subprocess output to it. - * child.stdout.pipeTo(Deno.openSync("output").writable); + * child.stdout.pipeTo( + * Deno.openSync("output", { write: true, create: true }).writable, + * ); * * // manually close stdin * child.stdin.close(); From 000315e75a20e82616a227702c98346f2b5e8b59 Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Tue, 2 May 2023 02:14:13 +0200 Subject: [PATCH 102/320] fix(node/http): Request.setTimeout(0) should clear (#18949) Fixes: #18932 --- ext/node/polyfills/http.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 1a585f74ce..6f78777428 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -351,6 +351,18 @@ class ClientRequest extends NodeWritable { } setTimeout(timeout: number, callback?: () => void) { + if (timeout == 0) { + // Node's underlying Socket implementation expects a 0 value to disable the + // existing timeout. + if (this.opts.timeout) { + clearTimeout(this.opts.timeout); + this.opts.timeout = undefined; + this.opts.signal = undefined; + } + + return; + } + const controller = new AbortController(); this.opts.signal = controller.signal; From 2f651b2d64523bdd377d22b8b7213a04ad82f459 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 2 May 2023 02:35:33 +0200 Subject: [PATCH 103/320] fix(npm): canonicalize filename before returning (#18948) This commit changes how paths for npm packages are handled, by canonicalizing them when resolving. 
This is done so that instead of returning "node_modules/@/node_modules//index.js" (which is a symlink) we "node_modules/@/index.js. Fixes https://github.com/denoland/deno/issues/18924 Fixes https://github.com/bluwy/create-vite-extra/issues/31 --------- Co-authored-by: David Sherret --- cli/npm/resolvers/local.rs | 6 +- cli/tests/integration/npm_tests.rs | 10 ++ .../npm/local_dir_resolves_symlinks/index.js | 3 + .../npm/local_dir_resolves_symlinks/index.out | 2 + .../local_dir_resolves_symlinks/package.json | 7 ++ .../define-properties-1.2.0.tgz | Bin 0 -> 5093 bytes .../registry/define-properties/registry.json | 1 + .../function-bind/function-bind-1.1.1.tgz | Bin 0 -> 6301 bytes .../npm/registry/function-bind/registry.json | 1 + .../get-intrinsic/get-intrinsic-1.2.0.tgz | Bin 0 -> 11608 bytes .../npm/registry/get-intrinsic/registry.json | 1 + .../has-property-descriptors-1.0.0.tgz | Bin 0 -> 3854 bytes .../has-property-descriptors/registry.json | 1 + .../has-symbols/has-symbols-1.0.3.tgz | Bin 0 -> 7067 bytes .../npm/registry/has-symbols/registry.json | 1 + .../testdata/npm/registry/has/has-1.0.3.tgz | Bin 0 -> 1553 bytes .../testdata/npm/registry/has/registry.json | 1 + .../object-keys/object-keys-1.1.1.tgz | Bin 0 -> 7677 bytes .../npm/registry/object-keys/registry.json | 1 + cli/util/fs.rs | 82 +--------------- core/lib.rs | 2 + core/path.rs | 91 ++++++++++++++++++ ext/fs/std_fs.rs | 10 +- ext/node/ops/require.rs | 10 +- ext/node/polyfills/01_require.js | 6 +- runtime/fs_util.rs | 11 +-- 26 files changed, 135 insertions(+), 112 deletions(-) create mode 100644 cli/tests/testdata/npm/local_dir_resolves_symlinks/index.js create mode 100644 cli/tests/testdata/npm/local_dir_resolves_symlinks/index.out create mode 100644 cli/tests/testdata/npm/local_dir_resolves_symlinks/package.json create mode 100644 cli/tests/testdata/npm/registry/define-properties/define-properties-1.2.0.tgz create mode 100644 cli/tests/testdata/npm/registry/define-properties/registry.json create 
mode 100644 cli/tests/testdata/npm/registry/function-bind/function-bind-1.1.1.tgz create mode 100644 cli/tests/testdata/npm/registry/function-bind/registry.json create mode 100644 cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz create mode 100644 cli/tests/testdata/npm/registry/get-intrinsic/registry.json create mode 100644 cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz create mode 100644 cli/tests/testdata/npm/registry/has-property-descriptors/registry.json create mode 100644 cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz create mode 100644 cli/tests/testdata/npm/registry/has-symbols/registry.json create mode 100644 cli/tests/testdata/npm/registry/has/has-1.0.3.tgz create mode 100644 cli/tests/testdata/npm/registry/has/registry.json create mode 100644 cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz create mode 100644 cli/tests/testdata/npm/registry/object-keys/registry.json create mode 100644 core/path.rs diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index f5385c2f10..b4cf5af275 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -91,7 +91,11 @@ impl LocalNpmPackageResolver { specifier: &ModuleSpecifier, ) -> Result { match self.maybe_resolve_folder_for_specifier(specifier) { - Some(path) => Ok(path), + // Canonicalize the path so it's not pointing to the symlinked directory + // in `node_modules` directory of the referrer. 
+ Some(path) => { + Ok(deno_core::strip_unc_prefix(self.fs.canonicalize(&path)?)) + } None => bail!("could not find npm package for '{}'", specifier), } } diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs index 8f6ac75283..d4f2d3e455 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -156,6 +156,16 @@ itest!(mixed_case_package_name_local_dir { temp_cwd: true, }); +itest!(local_dir_resolves_symlinks { + args: "run -A index.js", + output: "npm/local_dir_resolves_symlinks/index.out", + exit_code: 0, + envs: env_vars_for_npm_tests(), + cwd: Some("npm/local_dir_resolves_symlinks/"), + copy_temp_dir: Some("npm/local_dir_resolves_symlinks/"), + http_server: true, +}); + // FIXME(bartlomieju): npm: specifiers are not handled in dynamic imports // at the moment // itest!(dynamic_import { diff --git a/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.js b/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.js new file mode 100644 index 0000000000..72d8913f51 --- /dev/null +++ b/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.js @@ -0,0 +1,3 @@ +import * as d from "define-properties"; + +console.log(typeof d.default === "function", "it works"); diff --git a/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.out b/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.out new file mode 100644 index 0000000000..25d44c6b8e --- /dev/null +++ b/cli/tests/testdata/npm/local_dir_resolves_symlinks/index.out @@ -0,0 +1,2 @@ +Download [WILDCARD] +true it works diff --git a/cli/tests/testdata/npm/local_dir_resolves_symlinks/package.json b/cli/tests/testdata/npm/local_dir_resolves_symlinks/package.json new file mode 100644 index 0000000000..4c974022e3 --- /dev/null +++ b/cli/tests/testdata/npm/local_dir_resolves_symlinks/package.json @@ -0,0 +1,7 @@ +{ + "name": "foo", + "type": "module", + "dependencies": { + "define-properties": "^1.2.0" + } +} diff --git 
a/cli/tests/testdata/npm/registry/define-properties/define-properties-1.2.0.tgz b/cli/tests/testdata/npm/registry/define-properties/define-properties-1.2.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..69b5e18756c7f4c5367aa571021c1e451d0505f6 GIT binary patch literal 5093 zcmVwpm zViRbPut5O9L5Z8uf8VM;2=Gd5Z=&qX2=+6H>ZaL@!n~tdcflgS(V2+oDu^k4k zH~Hd^eN5AA*6U;&9G|)cZM8~Uf{4Gy-U)C&#-}x&j6ONIK6Ydfckd*iUdHVzxnT{ zG#-~YJkETEP@vf>d5-S0V3JM&FtEzxS5lgJ0lZYp@QpseH}K()!!jbnyB^V)&Ju}T zNBAS>8qQf9dk*J30A%F8S8_ekR|#x6t$h7;sm#Ge5hP!<+(ZYbpeqLh?IuTm#DDQ0 z=x2>;y3znModTOAbklnpGeC9-O_I>|U_0OtC$ux% z6i|%sgeM2&AO+b=eMwO|zH#tA`XyvO&Uf~Z0SoPa2wRoRqGAwG~XlL~`=y+A{d8cvMegB1m- zM}2}Jf^YKHhG{_mZ`!2aeRcCwtJfx-K6%&c{@A%}Uy@R*5AP5)e(K!3>E7ND!05HE zZ+<4-SEP0QGx?!&eOV^$|9aPJ_xq&V(>ib8U3J<3>s()4-ClOCUz3+Wcin}R+<_$w zOmDgbJCv+C?LIPl+wNVwf!Ef{&Q<5;=d$*yb90S6Uv+z=Mc%b~H=T>yt5%P^yY0Q} z_S?|?C9u8jT)*l;FYUMO>l*`l1sG}n2rs1nrge3NU1_abkiLiVk&EuTpL?CxZ*It& z?$u=*fG^u1S?lFhTXY43y0~g}-j>N_>uu|Gn^SdxOHV^IAszYYO&dYjUkmbM` z?IlBU*=}6{FPNU|mFY8{Bj>+X{KvA-gYiE33*vv%YHY;+qnh;`|DWR7huB6$RNOz) zv^kB*2e!caSj>LOyqF#Ahdj3E2;M(D<0w;_^cS;n=<_1nB4Xg|1%RBNpObyH6W=Fa zeYM(tuBPZer z1ED&iSPx;SkFrBhp%USeC3fsZvZlGzJ7DOMMA4lbXaDfvP%=rf2;*(C%3;gkLBQ=D zN=DAQu5uNKF_cL_XOOGRsh?rq6EnvHZ8NSO&m{-g44DBD=p4-9OH8g3++>P2-YAd< zNIT^1oV`SlT0i5J94zDz1{+a=bT9JhIw*+pkAILC%h!WL$tPz6TpoqATNEd-H+8qB z0M1ezlVmX+`)s*$-rHjVB&iapcFlWxd`zcOa|pPPArM<&>LcF~ zi+rDh2iB0Bl3$@SNgGB;hMwX8nVtz{=iWl-Sm6nS6*j|RWo4ohmUiz0H3O2Ev*eo6 z_FTw154ha|AM*@8z7^d|NfGkRIjM>co$V9K2?pMfYha+>hK1FgUCYG#&#=w z@nQ=c$YBSO`OIJsybTc7`JHKhLHw7W29|A}HvZRZ=0^OlHX7!0{C}$YANmbeIjvMCumR7;h+Rn{*gJxuROG<}KPA{JsqiNS5gwY%GC%bqAAMk;=gybp zBOs6|fR|wgxlQ10v^517!A6?}gw8=^q(4LF4XMo-Yy!8`xVLp^%nK6WU(xmPNas5& zCg5)H0K(wIl-qt35jLS51bW~is|1?^#bB0aDd2C={tzvQX3#|lTYX)}!&LYHwP2T5 z*v&b@CM>`sQ09mdDpHFw1ZNPYx)$p}3Nkl{7#U9f0_epv7J|TucDJ)vlm~KyLFkyZ;!AuC28=;OvuoCdPebs$!%$!`z zPbO7xoF@;(fryN^DR2g1;+E|xoe4I{B6sD|p5?c!O!w6baag7z@H=*V`;u@sng|S{&9ennq3O05Sc7mtH-CEjqBh(l`$9 
z6Bet*0m4H<&G~kC+9LBGYRH~cEq9NeNSR4R)q_HQuTfy`q>TDUN7fr)p+xQ`R#G&Wt zQfI)?0+sI#rP^f#QCdgWfJ*-k5?iCL=4N9(aojLVqm0k)rY}p`EJ87`^#j8Iv!oWT zA&8j|TL$J7&iL?n4!zhFmcm!y7z|-S_?sEPQ z7wCnuC~>JkEpoA@4B@dV;D$c9J}6Hrl&?pn!snC2KO+AxuAisP|BuXOW6S-Y>T~}8 z6wjCBLb(M^YxzE&`tTfDc#A|FDdE9T9P(P$y(dnC)QKEF4-zOCFAQJjN{T=oJZ*_AmDfd*btI0Qkz{E`gjd{2{;d_;~^fyRhJq?@Bds~d(Pm>d$F zSf*9eO-rwu$R3o-y=kIpIvHHE`yp1Z@Kh|XilK@^NC06EHA3e(Die`QViyTVW)EJ1 zu2d9)_;Do}ou8kNU_-LTZ7_0<9jEEqqfZ_Z$XSuZv~D^_cH{WynAXS5dTo4CWz8mK z4ce&IX!WFih~t^Dln!||JX(!TBM{?=jhQtzKTixvOilO1WKC;)T(?f@mhIFV?6`T{ zth?-};heb5f*4rF9xcW*ErQFF)5 zTFo|TjUL%2$Cg8zW9l3URb8?Pk1Q_zmkL}^r8W$&N?2~sQZ~yCx{#NQGI;unkUVgb<%W> zt5tgJHeK3m9F3`a(uAqgzI~zVWOa4x;s{;Wztc4C07wPR-qOvwUaf2A>yJjhCW9Vx zLH00~3w76)WyFzVMjfpDqG^d?$${et9A<}e@Tv|tXO4Xt+Mp@ddmvMwV=)28k>F_w ziP;P#RcXDzUYX&-mPmA6Fs9Ygz=#MT(X;c4!qGPc0`3H4jb{Y1FyA>NHjD#0d3Ae@ z=kvy5=5vbW#$lB&#L+k$#qL*o7fP+;E!8G`SR2z~(#ujE%rp$6Rr$n%8of_!xn4G%hHz-; zH+H(XEgRIOpE*+j#o2`^tggnbu6nGS#~*8Tl&@VM2DLJL05V8Ec*vO14?K4v$qdvF zwlsz{4Y4Be{zqC{FC0V02-`InvJGS~;_c^MX=f(ycVPiq-ZLM zm^4dFS_9If-IFH#SszxQJtZz|;K_V!q>TAA3k(3kG|6fhIza=vLfJ44!v1BGaFC?` zw7V-|!lcL~EY*%sKTJmC^1!%yjSo})U261=fPK)Fz)fM1kVL6RwO}W-jP-4$f|AcXosvO zAJF<-s8zK(^z!O_VngA#czO!E0}kaEr&Fnfe9=^Ov$;bFTUHASiniMB*OU?jL$kRn+jkAD_;D4S-$96*T_hkwmc=VUATTwfuPuZm3GgC7p$QSMR{^{x;v!}|a? 
z4#-n|T_WcYj*j@4oQ!it@~?UntgBJe{vV!gYi(75D)t_uY#@U8{T`^n;8#NVFBr$C#3GOUwig2M=Y$BtPy3WW)`3^H1S7mo zWKe#4t-1;vbD*^fv^lV50u=8?iGGb;_t69)%qff7zhu-ukdw@>SOL4kG8ydC{W94f z@AE54y9icqROQ_u6QH*D55?8C?Kj)DUTl;1+G_l_mie7F-mdXGZI3u*ydWHu#g;O^ zS#|o<(^)1sbH6>Qsx7}G2eUxr?N9N8aq=k0wxaN#o0G>+Ssmuqw*B zI;`Ja9bn-+@7MYsnE8U(ei$(E0~{8GWl@$;MHum88Br$zuq+6yc+gX#N`ip#KH=4W zmE~+KD{UjH<>afo%-vTefJ)25P)_gAx##H4j7#tl)T_dO6t%V*&pwg5g2W;)DlrPt z@DrJR=>=|hzIKg5TZ?stxq-;;8Fq-1v_&g`{QWYBp5sFM3iCAaRa*m2KKaq7ttz{m_}0o;fx4Diz!ylnnscFk?gg#{BEbhU{ypRY_PSyrmP%H8AJkJf3T=C8fU$q@QegBOR#40+^Zq~PNI+6*|L*%gb=9I!t$(8Z zU$t7@`2T;b`t$F9o@)OG*%=ncA)C*OV)7^lf9=YAg4c-fKtItmu}V(KKp50ei)b3N zFgPV&k{$y@Tb@r-f)zN}ue=ioSXGX36QF_V1$TDn`;d5ebGDnZ3y9=miI{Wv5Z<}o zZn_^r-Su+p6m}B6=ccD*YpBuiuqX?>ba6L^O~>Rn1iDa| /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"~2.0.5","object-keys":"~1.0.2"},"devDependencies":{"tape":"~3.0.3","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"7f59dd1723500ba6390a2a6fc330e20ad7a1f58c","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties","_id":"define-properties@1.0.0","_shasum":"64e04df26f37a219a4467b2cde19eb075de9d004","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"64e04df26f37a219a4467b2cde19eb075de9d004","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.0.0.tgz","integrity":"sha512-vxfdDa3w7qextqSHUtH08ZxQKucIvu/QONdqv/h4HsQiDfE8wHXphVySR3Lgvut1LbDCwHQkP1DzcWRSuZlqKw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDibzfFKtipNe9VvTEnT7z/PNhXuZHIUf9fGk1Ij4U4GgIhAITZsMW1GBzwfg0SO7hIIJg5bwPxUL7dapWUOR+PC20b"}]},"directories":{}},"1.0.1":{"name":"define-properties","version":"1.0.1","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"~2.0.5","object-keys":"~1.0.3"},"devDependencies":{"tape":"~3.0.3","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"0fc836602486b1360bb54f430c18cebba25a0288","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties","_id":"define-properties@1.0.1","_shasum":"aa2f1a1d38cc4ebaabf314ac1e77e024060e0ec0","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"aa2f1a1d38cc4ebaabf314ac1e77e024060e0ec0","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.0.1.tgz","integrity":"sha512-+Kc8DhpxT9gYggrcyv63MbFDXIH4ye6xzvqXStvojl74Vo2V4BqfRNWZZaeuUhmcqywFdD11Giex2COwC6AMdA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDOuXmwoDIJrWyXnymCBygPxhzg0zJH+pxNwd+4vkxvzgIhAJS2CGZSNENNLJ8taCSARD6vD7VS4nJ6Bgy8Ae5lvboN"}]},"directories":{}},"1.0.2":{"name":"define-properties","version":"1.0.2","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"^2.0.5","object-keys":"^1.0.4"},"devDependencies":{"tape":"^4.0.0","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.1","eslint":"^0.21.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"37dd7335f8ec75f93ffb0768a321a8f277a2bc94","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.0.2","_shasum":"6999cad02fd97bd62b06a9eb121d8d6966d48d37","_from":".","_npmVersion":"2.9.0","_nodeVersion":"2.0.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"6999cad02fd97bd62b06a9eb121d8d6966d48d37","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.0.2.tgz","integrity":"sha512-pUaWgaSuCBbnAvTEFLT4+9plxRT02eXu7cRA0kwj8vjoGD+w4/uL5wLDSy+JcqTNd0kpP3/rWgPxhqylW+i7PQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIBnniPlNMiOhsQ880VdcxE/tovPeXpMr8s6cWnlHCDcGAiEAmkMc+1/WBZot0JId8bk3G0NB/vROPenRp0l+8fNj21c="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.1.0":{"name":"define-properties","version":"1.1.0","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"^2.0.5","object-keys":"^1.0.4"},"devDependencies":{"tape":"^4.0.0","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^0.24.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"0855002376afdcbc6c6c5d56cdb207cc69231535","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.1.0","_shasum":"e445de572ba03584e707e6e7fa7757bcb61e2688","_from":".","_npmVersion":"2.11.1","_nodeVersion":"2.3.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"e445de572ba03584e707e6e7fa7757bcb61e2688","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.1.0.tgz","integrity":"sha512-o/M7oAJDcb9Q4BNA03OmiPpAqFcka0CAhdmF9er4P7I8PX9CLqyHLjLMJOOTjj6N/LF8/0nMaVKTbth0ouSffw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDaR4GU49eGjoyJ/nR8fT33SeiCnT4lsFkQpjyWiwENEwIhAL7HgxI6iRVZ/g2reGdHTbXfS19mULx+Npark1W4fdrr"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.1.1":{"name":"define-properties","version":"1.1.1","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"^2.0.5","object-keys":"^1.0.7"},"devDependencies":{"tape":"^4.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^1.0.0-rc-1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"4647949f0b4da52f9968977a9be754e5e11c5ac4","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.1.1","_shasum":"ac04abba5d32847f912cfbe41aed932faa14061f","_from":".","_npmVersion":"2.13.0","_nodeVersion":"2.4.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"ac04abba5d32847f912cfbe41aed932faa14061f","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.1.1.tgz","integrity":"sha512-rVN/zTp+R6Tiu0GP2GYkLPZax/F5b2uh3VQnIdsfVsKHDz1yfNsUWiy367ytlDjaYzMYOhIkGLUm1PtypHMTnA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIATKnr2USfTV+5IMhzkLImZlpy7AHG9ACf8oVwUhzuSNAiEA1hXotlSI/6a6Y1Z8uBOcKELWeXHjDXXFx0uC3uEVPMs="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.1.2":{"name":"define-properties","version":"1.1.2","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"foreach":"^2.0.5","object-keys":"^1.0.8"},"devDependencies":{"tape":"^4.2.1","covert":"^1.1.0","jscs":"^2.3.1","editorconfig-tools":"^0.1.1","nsp":"^1.1.0","eslint":"^1.6.0","@ljharb/eslint-config":"^1.3.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"6467a10e6f493d8a1a4f6ec8442ffee137aab7ba","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.1.2","_shasum":"83a73f2fea569898fb737193c8f873caf6d45c94","_from":".","_npmVersion":"2.14.7","_nodeVersion":"4.2.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"83a73f2fea569898fb737193c8f873caf6d45c94","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.1.2.tgz","integrity":"sha512-hpr5VSFXGamODSCN6P2zdSBY6zJT7DlcBAHiPIa2PWDvfBqJQntSK0ehUoHoS6HGeSS19dgj7E+1xOjfG3zEtQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFmYfFegTKCrSnO8+YFjg0SZr5jYybWblLiyd9f62pdBAiEAhGGfud/YdiL+CoxNQRaOwOT2Ft26BF3U16av4Qj/by8="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.1.3":{"name":"define-properties","version":"1.1.3","author":{"name":"Jordan Harband"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"object-keys":"^1.0.12"},"devDependencies":{"@ljharb/eslint-config":"^13.0.0","covert":"^1.1.0","eslint":"^5.3.0","jscs":"^3.0.7","nsp":"^3.2.1","tape":"^4.9.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"e5478e3d2880b90a97daa62d76abed34d91154dd","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.1.3","_npmVersion":"6.2.0","_nodeVersion":"10.8.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==","shasum":"cf88da6cbee26fe6db7094f61d870cbd84cee9f1","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.1.3.tgz","fileCount":10,"unpackedSize":23025,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbc67HCRA9TVsSAnZWagAAiLUP/3CxQIzm2kQKyFeL6rHJ\nHrqG2U6H8UXQbVjxAgTa4gli6BZSzIk6WkooZ1INWKJdfApxKfWJAVbRYCwD\nhXIB8rzI/kcsQPO+I1CVZvUmSdFwhtX7Wc2bzWmH5KN0m8LclaX8ddYZXXi3\nhtDIFXTVYIEam4fTdrBREO59hRopvcYBsR0QM3yjcD74FY7ugPxqzIrhhd/w\nlnbSUcFww2UKubiQX2YmNp+boJ74oEGoq7rA8L4PUdU1ffOPRZ47pAlGPVxu\noHKTk/KViS69KD1eChpS1z+ptPgZWIYobziluafItmX+A4XNFrHuaizF6s6r\no2lJWdT2DAABZlMLPaBBuiXM9U/STYLj+THpBlqSqNxXAP/c1tBIG70R0EHy\nSVnSAUcclxKQ5PfAeLBiR6sux49MvmX/Aq5ykC2OxAzgbknOQb4DSh7oOuFE\nsMxIiErRJ9gpTXVrgvlMwy6on+XqAGMhJZHDPvPrb0lw6Sv/wNwvxrd13bF9\nxKVEywS6OBgZ1Ag0+tgt+Iqp+1h7ZFMxWSzlWzJIlLwVAGKqeaCI0cDnvHaT\nB6I0PxC9mHMCBRMi9zjfGgYhzOOf+QlxthaM9fB0BDVraf4s1FIEUVfMDvCa\nGkPb86BE6dnLzCsIdt+aWbxW+lwRVwLu+QrPFFGTE44KNR3YuLG8eEEWEHpN\nHZZs\r\n=7SxH\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQC0rdupohHcNiPth5mboCDG22vQVDBbBkyKmpxYVwkAfQIhAN3GX6Q+b29wDdOkUdmUxnjGUuzPsyLMftGxNeNEznj1"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/define-properties_1.1.3_1534308039180_0.8446271629420612"},"_hasShrinkwrap":false},"1.1.4":{"name":"define-properties","version":"1.1.4","author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","pretest":"npm run lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"nyc tape 'test/**/*.js'","lint":"eslint --ext=js,mjs ."},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"has-property-descriptors":"^1.0.0","object-keys":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","eslint":"=8.8.0","nyc":"^10.3.2","safe-publish-latest":"^2.0.0","tape":"^5.5.3"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"08fa234e22964a179aad624bed13eca44ad8c6b4","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.1.4","_nodeVersion":"17.9.0","_npmVersion":"8.3.1","dist":{"integrity":"sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==","shasum":"0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.1.4.tgz","fileCount":9,"unpackedSize":10361,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDXsYGCvtExg9J1AGDfmtzB2pJPElyT+JVZGkuBGXJoXQIgHdeuoUI0aLNb+B+05UiELF99rRCn/kSrjgMoWoBzNvc="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJiWQ2PACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2Vmqpiw//bqOxAELjon6L7lB1w+12phuunCilvX1V98bYW2u+29Uh95oi\r\nNfYzb4AsnaDvXu5WjqOa3wlHxBMl9k8UAe5/C6KbzyiZzRuHTpLXCwHFpi9x\r\nXbvOxmG/KTeFH6QJ66PPj2B7mDDCKBdvicdOQWLlYSomngmBdE+xiBZC9vyl\r\n72VClJN4lFb92B1Bi9iak4J+jUYby8IuWTXSIksjzwBBgpw80JqBTKJ5gZAi\r\nGWFl3TmV8H0tWfmwM2RaYxUcCIka/PSsJZU22OQqncm2z9VggbGN3zL3OQe1\r\nTpPDnaC/QIC+ZYNpkP530ArtS5DKSuxiqNtgySO/uMNtdvHBBGsLs4mslwo4\r\nHGsUumHGGAcDsbN6nOfI6BlkEb5sVxmJkFm5PJcHQOVnP54e8IJjvxA1qWkM\r\nO49WMu3sbPPXjxiO60aybNXUIMdBNeuUqX96BW5kJQr83mhbW4dOIsI1k5XP\r\nbekhqLFp4tIHCoXcZg+kWTibjNoiAK9stFf0gWMWrjmQNu6m3umv8uxmH3kc\r\nC+RQCemUghhQHkJtCh9KiekYhZVIweqvyK4sUlgYE2Ev3zsA+VWllk1/rh9I\r\n/9y6V8q7IbfKC6kd3VOKgLEOG9aYNPiy23/1u9XGY29YJl7zzR7e/nnu9Iwp\r\nR2ureEUvrygxBbF7SNx7muKKBfs3Sy7Xo2g=\r\n=RjYF\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/define-properties_1.1.4_1650003343480_0.7576999598646093"},"_hasShrinkwrap":false},"1.2.0":{"name":"define-properties","version":"1.2.0","author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"description":"Define multiple non-enumerable properties at once. 
Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.","license":"MIT","main":"index.js","scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","pretest":"npm run lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"nyc tape 'test/**/*.js'","lint":"eslint --ext=js,mjs .","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"keywords":["Object.defineProperty","Object.defineProperties","object","property descriptor","descriptor","define","ES5"],"dependencies":{"has-property-descriptors":"^1.0.0","object-keys":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^21.0.1","aud":"^2.0.2","auto-changelog":"^2.4.0","eslint":"=8.8.0","in-publish":"^2.0.1","npmignore":"^0.3.0","nyc":"^10.3.2","safe-publish-latest":"^2.0.0","tape":"^5.6.3"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true,"startingVersion":"1.1.5"},"publishConfig":{"ignore":[".github/workflows","test/"]},"gitHead":"aa5afa274d736c1ebd59e403b5b3b5b404689a57","bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"homepage":"https://github.com/ljharb/define-properties#readme","_id":"define-properties@1.2.0","_nodeVersion":"19.6.0","_npmVersion":"9.4.0","dist":{"integrity":"sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==","shasum":"52988570670c9eacedd8064f4a990f2405849bd5","tarball":"http://localhost:4545/npm/registry/define-properties/define-properties-1.2.0.tgz","fileCount":9,"unpackedSize":12453,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQC2R6SBVZWJCCg7Z1PkTr8TYLGBpk5aQNaXmqzjLJml2QIgJipbNCmGIXHgvp4vOv0L/SBkYkerCHZ2PyQBAVjAScQ="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJj5niKACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmqcWBAAgCes8tVtZ6ORE3V+bO+EyPBU76QcCa52bvve/uQKu2DvcSJz\r\nnvnC1aIUlDkR8/ZHI8LC5ONJOos5nIvVwC9cjqjXekk9Y1pXtdmNmplQms04\r\njWnZGp23RWhR0RkghJT7c33Zt+6o0f0RokC/pXheR37qRTclwtx/QXHc3U15\r\nqWb8fQbotRHSnoo7bMAM6DVmEeGQD8KIPd5NtoLx6qL0IWP0kQzo1LUs8Cif\r\nE6Y6jkrknPhptz9aVOIj2H0AVtj7NzjSSxtk4qjg3v3Vw35dSj1vC5UuoT65\r\nn6bBMSk2uS7ISTcKJJsYvRHbfk06XQJdO0/LXPlzqLDjt34tA7M91l+Wmvbz\r\nJNuuoo0jmnMk0PLK4PCvAGLBnPUiBH+02iPwLL0EfrGSRaBIOEi+ZNrAqu2M\r\n8kb58GjN4qtfBwobdq2AIDSJKJ3tkss0KV7a8j54+sKyDHEoAXD/XziI4Q4v\r\nNtZCiFF4xtmphCOgj04j1Bp26gB0c6SRTXm7Glsw7vm4bpwsxxdL5GIoIQAA\r\np3laNAtXOOgVDguFoGHwfJTkL2d2lCpfiY66YpemYptsA0lQEEjvVhyrkHpO\r\nesbdlZi8EiZcZTNdto2Gj5iFr8Pqd764Z9+DLH/DaVGFcX+z6NgNRN1Ia49L\r\nLOetyEJ8/3fa4D+k3b531EH/iXJl8zQu9q0=\r\n=oqSW\r\n-----END PGP 
SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/define-properties_1.2.0_1676048522165_0.7917569075351913"},"_hasShrinkwrap":false}},"readme":"# define-properties [![Version Badge][npm-version-svg]][package-url]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\nDefine multiple non-enumerable properties at once. Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.\nExisting properties are not overridden. Accepts a map of property names to a predicate that, when true, force-overrides.\n\n## Example\n\n```js\nvar define = require('define-properties');\nvar assert = require('assert');\n\nvar obj = define({ a: 1, b: 2 }, {\n\ta: 10,\n\tb: 20,\n\tc: 30\n});\nassert(obj.a === 1);\nassert(obj.b === 2);\nassert(obj.c === 30);\nif (define.supportsDescriptors) {\n\tassert.deepEqual(Object.keys(obj), ['a', 'b']);\n\tassert.deepEqual(Object.getOwnPropertyDescriptor(obj, 'c'), {\n\t\tconfigurable: true,\n\t\tenumerable: false,\n\t\tvalue: 30,\n\t\twritable: false\n\t});\n}\n```\n\nThen, with predicates:\n```js\nvar define = require('define-properties');\nvar assert = require('assert');\n\nvar obj = define({ a: 1, b: 2, c: 3 }, {\n\ta: 10,\n\tb: 20,\n\tc: 30\n}, {\n\ta: function () { return false; },\n\tb: function () { return true; }\n});\nassert(obj.a === 1);\nassert(obj.b === 20);\nassert(obj.c === 3);\nif (define.supportsDescriptors) {\n\tassert.deepEqual(Object.keys(obj), ['a', 'c']);\n\tassert.deepEqual(Object.getOwnPropertyDescriptor(obj, 'b'), {\n\t\tconfigurable: 
true,\n\t\tenumerable: false,\n\t\tvalue: 20,\n\t\twritable: false\n\t});\n}\n```\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[package-url]: https://npmjs.org/package/define-properties\n[npm-version-svg]: https://versionbadg.es/ljharb/define-properties.svg\n[deps-svg]: https://david-dm.org/ljharb/define-properties.svg\n[deps-url]: https://david-dm.org/ljharb/define-properties\n[dev-deps-svg]: https://david-dm.org/ljharb/define-properties/dev-status.svg\n[dev-deps-url]: https://david-dm.org/ljharb/define-properties#info=devDependencies\n[npm-badge-png]: https://nodei.co/npm/define-properties.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/define-properties.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/define-properties.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=define-properties\n[codecov-image]: https://codecov.io/gh/ljharb/define-properties/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/ljharb/define-properties/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/define-properties\n[actions-url]: https://github.com/ljharb/define-properties/actions\n","maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"time":{"modified":"2023-02-10T17:02:02.425Z","created":"2015-01-04T08:34:45.318Z","1.0.0":"2015-01-04T08:34:45.318Z","1.0.1":"2015-01-06T22:29:12.451Z","1.0.2":"2015-05-24T03:26:06.197Z","1.1.0":"2015-07-02T06:52:56.628Z","1.1.1":"2015-07-21T07:17:08.897Z","1.1.2":"2015-10-14T22:28:41.286Z","1.1.3":"2018-08-15T04:40:39.246Z","1.1.4":"2022-04-15T06:15:43.631Z","1.2.0":"2023-02-10T17:02:02.316Z"},"homepage":"https://github.com/ljharb/define-properties#readme","keywords":["Object.defineProperty","Object.defineProperties","object","property 
descriptor","descriptor","define","ES5"],"repository":{"type":"git","url":"git://github.com/ljharb/define-properties.git"},"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"bugs":{"url":"https://github.com/ljharb/define-properties/issues"},"license":"MIT","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/function-bind/function-bind-1.1.1.tgz b/cli/tests/testdata/npm/registry/function-bind/function-bind-1.1.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..44bef1635cf6255fa76e8594e6b55acf33e6a033 GIT binary patch literal 6301 zcmZ{l-23^S z^WlA;^A8-lI4q?94e1#$Fl0p~_4?vS*L!TunvI?oBd?Y1iOOa7n;BJ*Y$~KM=^E>! zY)rCPE8M2JcP3c>WtK{YaW!YT@iQiPU4L;=|G)D1I5@`lQ3x3u$M!us9Q=`yg6s2U zQMFS3>|0I1K$z6UI&`mYRHf2!nq&m{cNx^{w*|8sw*6X^SdbBBa)ibDN8;AlmN7}l zKlgM6u94po_2Aq+QwTA3uO<wt$FfvmWMmwSEZLM#m`>xqM-g22u|D`O%lgOD8>yWAP@Gj6g`? zgEMSp{kbpxiG?QJb=qaLjF8U8H4(92dzMRwaNbuU+X-py{YQ-`+zU|cAIBYbegq+E zE-t$wY;3w0ZVV;5jGQDM$+<~Gd0T({WS=)b&J+r__}jU%P_nT5@V(76q2D31G$wbR z3x5FJ7fmZBCREfoXGtM^w&C0FdgP4DtepsWWv-_grD#B#J1dtvQSNQ3_s?Zw8>sEK zsr(EP2Cv;blA(QR1yoYrCaCj@1+S?M#|PqDGX-{SPrD9LH+Uk`-sY3eZXe8vmb(3B zc#n9cg9lMR;9jXmh2P<}&?bPW&*VxtyhOZ#fk{-4h0hMjhZ&QYD)=-W*jec5-G(D8 z3Y_yGHDl8b@(5&-k>arPJZe&LGm3&8BkH%L6#gi*g@gT1oXyAJzdlG#_lrWE4fytT zfFVu1`k>iQNMCltBaz-zB-?RHKYZ%gMh(!=(VmVY7JeAhjvMa7{^?$fRj>5N3F>X? 
z=4V`qBv&TEOwBD$o?AW>(f_@NxfYDNjhwJ0h5EcY#IUhuYx}pu7G1;0Hnwz1NRYlG zkE-pETUy9WlDIwO0+qyW=(E%|F-)y@Y&x?1<#yGrFsF@RhsT4%HivgTdJ6p=af;Vk z8^|_uI1qD>-%waldm$C=CivsPcgIqdA*nDJh*v$4zm-;(2!ox?)>2Yo3S_(eXMx5y zQ(7PO8e~fR#JJ5VJ)P8dDS8>^JHanb+Z!d z#wH_*#eK>~Uq6Jjv@qp+4sfnx+En{P!>gpvhefi+2%kCUe@~^W^DCol%4dM|R>>!2I)asa zUt2=BgqaHEO0opfZ%~$Hc&c94qO$d9?Z#`MpUZh`(n`1m?uu35EGRc>oz^im&E9Qx z1L3}KnvzD?ljhooK1GMLtkLE5C)~hzw02rH%Ah#DMS1}BxX+E2#!A=t8r6MM{&g() zZVzVuwEl$bzX_E12--Hs6>~nPX;sjjt$g`z|I{H%kUKi%%agc>&I!c-iRQaE{{#S7 z1*+d8UW?YxEZ^OB<%0-mac3ts`juZ6&f9THm!0pl>7#niJwWmvC`wtR$lV%XEs%Cv zv=S0pq0EhmG{|$-i58hhOOvfcdiLP%R(%GxYr**nNpXyO%>i0cwGSbbY$SVM4Xa%; z3U?8@rvA}E3PY36b>$i6X`1zb*(as2h?TD%5XYR&6cB_%N`a9r^Z z;vanG#|6-cqgk_M{&Xy1wuv;ce~nDW8zjOxQiqAu$^Gay4fWI5^g=G32)DqBG?cw3 zwai8#ETZri_faU)lfrK_u_o58$fD??2(2v5p)X9fr`RYDqWcLMjvqngKYifk6Rk*2 zkR_$KRyd1mpUo{vV?#V0=INX5VnhAgo->?;A1s!y88<+f$YjrlzoB(`BcqkYxbQ#=^ySIV1hv>~P4|F>1z=J6e*=XcD z82QM)60}IHS;0)Zg|k4^_xruNm1c#!cH)T!m64;Il|T9GGqo!wj(Hyec)6mO&(Nhe z1;IgB?)+#_{lG7)kuaXn-7p;Vb+3#a6{7GQdgL&Q2A2yaeSZzH$k;>gO_yqKk`3+J zljwso&@Z@ejqup+NT#^+b(olN_f*5-Tj*j+b*L=xrcfn*l;@K5A0HVT#I{%#Au20e z6R}uY|BUfIS|0vsVdlvh!b^9GJD~jll{GlEn^JU(zf&35#FS_ktfKjznbIo*eSe zK_{m+)i5_-Ntn31a_Guf$~JGE{xb7(Z^26@#Zg8Qx~|(nZC+MHAes=vE2TZ7Z|kdOWe2KsY&Qs^h8%# z#-}+k+(%GfX4I$%KR!`8P4{&|ArT4sj^PxL?~H_&TcZfVva+3f@|PudQT=j|X^nH@ z)bV$YtNPM2=o^ONn%tbY+HIDvaD7a@37ipqW|jI!l1e8)#G{odFpI54~6ZOorD z!X@ipNd&N>bKkEP*$L&vkDILJ-|L(O?p?JRn-+(b8FW!gi?yFjy{Q{F1I}Zea8Xm! 
zz})$ilai3!4&h#TKN@YWB}pkTFDD(VO!VaV8>&hYj^1_>0UWcq&?@ta!CvBm$*9}P z)PvvJck|0u&0lVc+X2vAbZ9C%^!aRTbxTx`zHdCNJ0BgIf(|V}hi0OShev#JXlyv+ z2DIf54^Rp&gm8Vvcyh6AoKXV&|LC0b?0YMzyZW}Kt6;eERElSe9}+uqe-(PpPmp-F&80KCJBcZA%%O7RWI3IG?;g1CB)>ye&t!T ztq-?Bt;mw26YCi)?;H^KOq+B%L+eMw5!(re)?hbisQx`~&IOuuEkNbd*aTq2?sxg* z_cX@f3x1Sl8xm`-P$SB^Miq|Hi|dC#d3{5kxc95bRpg4c7|xs}dfhWaVOLsDm*)^G z#5Y6!SGJCA-P4AUEm7CEh}PPz7leUbSJ+{Q9@oq+LY0^L>7E!Dga7PiQHwPEF5cfT z^S;w=k30u4l$wh%y*-&z0P5~{aIV@0AdSdmoA}S~P~k^z?o4_y6oi?Gb!9_n?JPg) ziP)~hcLwonHcVSSzklCN5Gh#^WM6ra%eg4n5J~6L=S<_Xc^s!22G#(MIFLOJwm%c- zMoY|dEYML|u)B&FVP5vMUf*ZQyj~hbDT}`xw%sB2J%La}pF!XT0>Ml^67s8UV0$w= z8lNR{F%22v&A!ujPw+#N{NPU`H6)^um37zg=JMCoWSQcV7_%#z15-CfxPuUe^5zfB~Zt4F~kV)j94Fq(PJME&dXE}itLWL(5HSU4d* zY|BlTls+uBzE^nN+Y%?jf;sNmKz0(-yCrw($Ex95qeL)b)?@W^&FL70cASQF*%#h| zFQ|$XQlp{LIp#l9o&>B3cWf+@iB48%0zA;nDbK#07)BUVGV`V{~mu6chBW|d5C}?qIc%KdDot}WPIqg!+(*vM9Q-s25qYa z3gL?;e8#a=L1B6JkC1VDNP6B6F(2D(hgt>D`_a!i#;RRdh$--9W{Qgfy@#I?X{ZN$ zW0iN`GIsL!qOt;X7`JiTd#;c^yBhs=7wtgZv_NAnD8t zzljxtc?y?4G?}u3pEFIEHpUfHhy7V1%k8F5={xhctcm^&ZM&IrX8wi1>lpLyb4vI) z>D0>AGY5Cv8*eVqyvzexdGfKl0f-WyI)p~RZr>E{KG*cTSXu8m^~1EzW-y9w1|{7~q@)Qao_byL~yIMMh~J&=Q^A|9R)Cq@?4aFlZ>oRwOZgnxq6 zzED3>B(P^y&tQh$a`ut%Ke=Y6nG46N{Dw`E}&Q`T7;%GN!>w4Qv(o>TFw(8gwS z8rM4L9)EL4(>jcF=|K92w-n+!NwKpmhoW67@Xj)LPXt?|BSB#=8)LEQu;a{?G2I)j zU=BT5SOwWb5`@~+RzTs*vG_DHchfKZs>()WgT@oBWSLtzUzaB+chfeIwv)dVk!8lC zlo;Wgzi6%SV0w(?LWsCM!iyx`xa>*vYd#bMh=_^s77pd%h0(|NmAB>%4h_hQ%J=kV z334Q=yI=oqFklceL!}x^(y*p*))_^P=)eP#^vE<8*`FJcZWlANx^WBp*~Du8V4BRk z;qolNm<9ZEQ0$irq}2ZIv=`oE{Rb5$tOQVxbKRE4s^sK~6Q4n1IZtajIlM-CrQvIR zB^M9+_jR?yu|gB@EH4luR`xJUSi=EN!^& z*FXulwUN_qFhvj1N*CYV4oVB~X95%k365*CAt$2-yP*Z5Sq8^O238J-rMNF@`=#-9gQeaAlH!H2#zo}Nv1{GsPjRf902`Iy=>n`<8v+RyE2J`=M(TUeY)d-z zU!id%zOhSUA)t_>J>DnTI#!rGd(c5){p$4XjFm`kf~Pnb9Z)0DlOpG@8<5Yek5FTk zvSVEsQo_r8_{_qO<4B`2h@DQQ?XD2{s=AcBX+ch-#|Fs^eR zX!gi_P$xt|;4Dd)h2kamllsY-`^`(Q+zHQH&knv#sf(D1d46?%3IWN>3^L{hU(K>- 
zM>VOB8%$ckHY+!5&0kMR%albTRWQg3*9Jn~p6(W0SDWB%CFOZ+{v{RE9JE$S$+Ud} zakBK?3dOliZ!x4b9kKLAz1V1;zA_u!k6ri4)QL-v#HXfNJ>vz$P&4{7OdmaFidW?CMCFr zx#&{`#t~}cJ&bAe3vZH7tIw{Q*QYKNQcS^lZbeWch_|K2cWGJ*@;IQZV3#-H~q4Fxfm=|4STaq z(i~LiWyn+?5mb?rj~g;pu*=1Ri`plK{cF=er%rAxd`-r5_)}h2bVWd#AMK!UZD#2wM6G&33mHEWXcx6ib=uia zE+;WAL1I#R\n\n\n\nImplementation of function.prototype.bind\n\n## Example\n\nI mainly do this for unit tests I run on phantomjs.\nPhantomJS does not have Function.prototype.bind :(\n\n```js\nFunction.prototype.bind = require(\"function-bind\")\n```\n\n## Installation\n\n`npm install function-bind`\n\n## Contributors\n\n - Raynos\n\n## MIT Licenced\n\n [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg\n [travis-url]: https://travis-ci.org/Raynos/function-bind\n [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg\n [npm-url]: https://npmjs.org/package/function-bind\n [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png\n [6]: https://coveralls.io/r/Raynos/function-bind\n [7]: https://gemnasium.com/Raynos/function-bind.png\n [8]: https://gemnasium.com/Raynos/function-bind\n [deps-svg]: https://david-dm.org/Raynos/function-bind.svg\n [deps-url]: https://david-dm.org/Raynos/function-bind\n [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg\n [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies\n [11]: https://ci.testling.com/Raynos/function-bind.png\n [12]: 
https://ci.testling.com/Raynos/function-bind\n","maintainers":[{"name":"raynos","email":"raynos2@gmail.com"},{"name":"ljharb","email":"ljharb@gmail.com"}],"time":{"modified":"2022-06-18T04:14:28.973Z","created":"2013-06-16T23:25:41.232Z","0.1.0":"2013-06-16T23:25:42.888Z","1.0.0":"2014-08-09T17:02:51.069Z","1.0.1":"2014-10-03T07:38:13.045Z","1.0.2":"2014-10-05T07:23:52.930Z","1.1.0":"2016-02-14T08:28:42.411Z","1.1.1":"2017-08-28T07:51:35.937Z"},"author":{"name":"Raynos","email":"raynos2@gmail.com"},"repository":{"type":"git","url":"git://github.com/Raynos/function-bind.git"},"homepage":"https://github.com/Raynos/function-bind","keywords":["function","bind","shim","es5"],"contributors":[{"name":"Raynos"},{"name":"Jordan Harband","url":"https://github.com/ljharb"}],"bugs":{"url":"https://github.com/Raynos/function-bind/issues","email":"raynos2@gmail.com"},"readmeFilename":"README.md","users":{},"license":"MIT"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz b/cli/tests/testdata/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..b55e814a0a5aa15fbe6bc888cab775b30e4898be GIT binary patch literal 11608 zcmV-eEvM2SiwFP!00002|LuM2dK!wM5Wm-0TvuzFF;ES#q@cFAR@VClxngT1o zKqcXi6rcZ1{{Gf`u#+Z9zOzsAbj5}}_{~1&Y!POVwDTXc4^x`@J0g8?4QYS@Uo*SF z;6DvbcbX}N)=JS^DMp#{GcP4w%~+<&p=nBQMt%xX%JR{IeF7n6kg0J3K{8*?*ylLE zDU#S<@PF|0+!&G_I-4b*MvK*KwsXO=qtzl9E$Ez$<}?qc@#3n$jKk54Ew1uukus;B zM_69gP8ls(IznQ3oGfH%o_0|HEon;Unar4!=^$)5o3lk;r&%U*!m^Tx`!|9B@WUWK zOERlj9wWpEs*gB;eKRW?WsyMLu41U>YVNbNEGSFTeDsmss1(uM$u&#UIOLT)2QX}Q zXs1|o!cNYzeDAyOc7{B;+BaAf`!ovZQk8%d#XrF5&%IxL{rms_Z>jYE;wJc~DF0ja zKPZKk{?|;${Hp)Il>P@TSt{Y3V9J7zpj$vMfM7sIL}wY#N!b!q2uo2@fMWOznrAhi z)AVL%NceC3oaXZxetyV6F`~wT=_Ht~LMHXgPC=O~CDboFpfJAbi+}C-e|7Zi@c8WT zUoid+&2n1f-*EJ=0yH0GAtki-jAM#zK8p2hQ6mVoHI{#g%*u0ZQ?K8IwU zgmHwQj8|*9@@H{29g;9c(*7!k#0(R8Lk_VXdr3+%Hkd?p(V%7x~omFlEeJdB-19@81Yi2=)NR&xyH|9D8w 
zPF}qIgH@Aw2{_y{B!h`v1`SfC_z zba;luULT%5`w@N~{BZQ@=fq@0kUT$leem*-W1Rq%(;k8e?#Lg1Jj5g{?*RUL z_V(!H7_0H@^%n9Fl|6qcd!f7pEt$hdpdgz&PPV0PpxvP{F1q zwH^Tw{Qc9}VL^~QKRkE^w9XJ2tEhm-U+3t5*ZH6Oc`sRhv;c*+$l_r9A^RfpzoBbR zYyLM4&Hg(7e~G_7%yLA`kNwBJ-Zf20RSL)xm_Glqic>b|kH>rcT?vEYQq9Zhl3^z1 zMM-){gK0Gzl12>D6iCSY)m0TB;WOC?%3Q*y0=r@ha24_-eE~B|1$s_DvQd#)#9{io z1Bpnpn?=xt#4{=gW0=SD8rDm;SdX}=%y|-ge3K?HmF71!bVwg9Wk%6}0nDX8BrBMJ zE1;M-@?$U_ifq*s*Ht){jjriz#YR#5IbVT&qoBUMI(hTF3VD?*3lh&+6fc-!@g#ja zO_NUzvJ|HDlm_`8#_IMuF^VdD8)uLO#>bPgHiQ176W*2MC1&L}OE#8Z`28e$#L-cF zpuEp$HuS-4l#L0--7dzK@-eVPX9Gk5gQ6;sL((sr9=?O+1m+7RWp_L-(QE2V#P){7 zs8AqNnh}wKhKs>@Lv*MJeEqp(HOHD~U96s~yo&+&vt>5q))*i3!LkC2i^M7gN+UdC zc6I|M?&rfaO;Y>`fD85+-Y>@G#m+i@p5H9!96CWMEkc8UWf_h%aCmr39sB{%>1Bt3 zU$z1l5WI*5_uPaW?QYX!pe#P$RMZD(6|RQDN8OUU^H#Lbrzu;Ic^rl_MwT?qL8Mr= z=;wrwB{JARcz0(DJ`Zqdq9pg|3SHzYwxOFPpU}m?6UZ65T>%dAEWTP~{a=8Gct|J>&IqwHr1!ZlohCV^aE}+ROx?D z`Y4oKu@X7b0TAbapWD3tkR&jy7ENYl^1Zq3KZ5C?f`1FoMKLv|WokFq7%uJSiL4m! zLNz3hvctt{4z`FtVHCOj>4W?30VPEli_%*|2p?g=iO+i(-A!f459VJ$4 zzNiUSrSe^~uTL{p(KQ<^hDHwwTLebLkch7^`Kv^76SG;E$&9sz!=SGKDm28GJ5vzj zl(WIVAr~S^k>I~JP>}g0KL2cCcQ+r*t*DciG@stP?JD|GNn)irJ95hw?n13MSqH?e z{m_ZCnV+?J{|Glt-8+`LS9Vb&qv-J_LahrMnYYP93w&+aUCFnV3`KM4!D%whf+S@^ zf(Q@P&SDso88-qN;`KL@eT6xpxo(FYp^n(M6%0yeBtcykMZH=FRAoEC`n4AhA=1&(_b zL}NVY^vYUI3B6txFBk0l=jVSwzwzRtzhBZ;U;|12!Gn~6SGg#fd`y?i+06hW#Y5=* zD;|^2P&jeTpD+B2A6cica@L0hHJCIZflmff0*kKQJPzdqv0eR1t?osMg7K`-nraei z=}mBPgDLwLiFdtGx@>^EihMWj8u4FiY;+8T+X-)m-6ZhXKDda3D(Fcq&eGNE-CRr2 z=3$eH>~{hG%ffAGeq|y3b>h%rBMKKkNozB>2q%JKlYzL`bV(jQc~S|%)fJi$86HXv@)cKRoWuYS zYhyHv|H`CwlYATqU`VckjS+}P=qL$U7NqeKB)w)6Nwx(3_=@F?=-^s<0GhM(_NXEB ze47S0@@i#v0jWWatD&kmjF@cl6WU~2MRinZQ$gLGU4*fV7N~W5HS8|qrtA9o+m{8b z%56N^dQ@ zB6+p+4bs;Q*N{NPYn4Tr)**i~=iU-1@hUPXQ)*IJV|c9ub~H;RP1xnSkpSU z2;yz>uvx!Vmid%yJb_6~+bkh>P;}jJO_{lm%Bv%+rF3Ts8|6PP)G@?QsQ z%UF|)DzIv!HHb8qaocOS^~iJ+y|sMVWTIR*E7FPumM*=4i(0R73!DrWtW_1i?N$`` zTD4g}Cc9Q<^V<5BV6m(UZ{%LfVOd+okZ<(fMasnj7%v8w%Xn;_mnVv=HA`3XeceFy 
zZ?gX@23h$03-0;4^ZqYWw@kgY|JyOOulv9LMf<-BwV?L-R2KL^`B#3)O-B`rNPU$# z)k&lH4rKTCh&XRL!VLxfjExp}oCHs`;pXK`gfvDX{wBAA<%idxj#&5%@qN8x%abbFS!Xz#_@pj%EIjPB9igA7e{2kforUah{@kzj_LPXrKo%$kEa?R6KF}k zgB$)kIwbB8^mYuuBFy2Z$w$!QhyZkYKg+#)&FQ=m{1W`kL!EUsV!^ZTS5OR zX+_agl^$&B??p0NEub^PQQ7bruB?x6R~bwBSG9*9pkxFrW*Jg0!mwiw&3KDoOOVEV zhDQ~8WlXo;2s~=1+F1}1nAzpZ$a>01OYj_nfuACw$%|*N0~?#GOos++UQqSwz8)W zBZOtwGSygdv1-se-EY&@0*4kC=uXLqPPWPVTOYp%KMudRLFlQ7QLUSaO6)w z$dzdrl`IrjA3TrZ2RIyx-9d?=>UMPe_VnoZ?C9AUmevD9^5E(!Wmhy;*6hA2uAXsA zk#*o9CoDDr?8`O$BI$=!6tP-CW$JCEl#nI8qr7GG?N|HzY1tm<7v~~%9807N5E>v{ zH4T-Qo+fiph(Ra$F8IRt+Aq7b%&vH~oAYm>)5%T)E>GrhkX2h)q}00 zq>^qaLxCwWyr1xsCxk3WLiW??5vCSD>RHc{`7&8B(F^TV4$Kx6I>nEAR*g
    --J zzZ|{{#ED~|SZ&Z`n9oVTXGuo=;G|y65}KPvv(Ku@x3p#zv?SRUgKF|!NLEGQ1+G%Y zf8|HxPT3V6b*qu?$iA(J4ir&TWsDa#8qc|d8aPrqy~5nl9cP9DPEpeB4pK7lc3Kip z5o>pGnRq*~YWKz&v|TI)jruM#j_I*Tg)gGuyX*g3CW7`%@Vm2<<4Se#r>Kb7?uWJD zX-&M>bXh^eb;Qlp2MC4NS)C1pz!-U0t8J$xfaWb=4|coiEsM^lSQLKpgkyf61}2Gb zJW=g1+*g|eF>y)LjPdb=8Rf4sQ_dyoJI?^d=5bc7o=mx=tt6D9@%g4s;q%Sy_@9N8 zozlfsY4()AL?#0(1Jq>&pq8S5X0kmb2QuXr9wd~cadr54DGI=EJn@WGiNdd22m*w{ z+ay3;gzhdtfW1M0&Zab#HfzhoYvtWmohG~}L2e@3mz$p}iT6+f{r;VdKmpStBffP& z7f}MP)a#ps+cD#P(u(i`P+|0QfZ|t?S<cQXbxu=qP2YS`n2h5|QrPX(y@kd5K)>XSqxupOJRnUOp-=G00 zcc1}K1&z9fkSTYd0Z=@2BYk`ftBm<}s7>x_J7Upm#AyI`n&-=Ge{b(9&ZjGX93=C- zJTSdIj9?@gosF_74U+_Fj45ZlJ&m(n zj2M+Z?YXpMD@R277l2ukd90#BVIp1b!jl&%Z#C{ z{H;PjuMS@xJp0r8gI7lfXNP6DrkoRNbKQBrlG7LaLglLHMHwsULfH)bp5ZDR%hn>m z4++x7mFw-Ba3x3^7jJIcgoycVEZ&A2@dSjzT(et+qgV4f8>to7kw`81y&b!&>|4lm z(3GKz1~GN$sslkqDH<3mc?}9{gLkrXSg73+_E7G;Z3C;2K@&lxcOg~d0u8)s)>>5Q z|97FvtWMN!f!WmC4KcZsgsc;e`^!b)_BSXM>1&Xrk=4eM8EeqGJEY9$LPIzD4Z4Q{ z7=LmwOsaqgYd?-|>R_2lmY`s4HA;t+u4092+kh2`UEqza{Q_K`hwBD!9)MYgP(8oV zK;|0jB-)58VI-<_ksEx<)GkJ=2=DVLTWr8UP648h`5sJ!77nB}pi-v72Q@&E+=9)L z*){6`=IIi;^x7M>Ei<|*T=(UhsI|;)boA6)Xqeaqe#V+I11X(=O~1E^*39oD!lkH* zsg4G>^ByH#f;rxQ{7s7)-H+=U`N(guc1U5C&XW0~4OC`?$q+og6dj-0Ugr7!B9 z7IBihu`FBM@Dd~o@G~J7QTU?wKuvr6SNja8z)<+v5^t$Hz>`efMED`6HWh)VsHAK; zqXFB*h8SQK3KFMaBu~$fhB-hwPhIwqsf&_;)fP-|h9pVKv+bAos%Vo&WjK`25|) z#rJ=IcRqN!KNvlI7k&?4-i_gN_v!B62j@Hei`@a{J$?Aj*ggOAyLT6VfA?;@``y!B z_zgb>PoHoMAn^`Bi?4rw^zH8M-qmAS>|qwrC3^-+5et6zZt(8i?o-Gnd*6LWe#*q{ zf&Auc;?v+`HiJ5_46pVp?)u=T9mlFB!j5>Gyum|bmDoTveUO#cJ#Ys$PWk0fMGs4W zA<>3JS6i5&=x(492jQ-*AjF;3KKb>p1+ZxH`qsykBE_f@;6^#5+1w%tGY6bVy3CGn8o8gH44 zmqQ}1`W}+MtP*(@W%UXULHS&K$bGEnZ*yq!l$2#DykSk2K$(53_c0agcE&Ltne3nw81r6r|mm=uKmVXN66bZ-som$HuS zP@HPzSIA^T;;-`J+K+1*&oGcWCU3w6hv9~oy7JpVFzWeGo}cWMJN9_7ylMAE3-cqz zSjK+66^Fg@crX7zLb4au57th`i=AV<2AnCJz~@Q0n&BiOj?Px(wsp+6LuM;Y&I{t& zL=0}eXe&@d-U|%J#id*SIG2gHruRbL3e@*y5YS9 zrC<~uOQnmOPq@*K%N))5nMj{q#S3xkFlZ#yO7f1{dLvd#bKqch1%kv6WVMWq?aOcl-N 
zGyTVS`C^qq!=%qBe;ETM9It*?j4Sws4lNa>(IrG<<~86HF8Z)&ZNDc=KJsaN@ZV6uzOW6ngkrGgNpoziWn z2pD0e@T#Nc;r~3Snk+kfSeFTS_NkW8zm(fSA+=(12)yFAL@VwiV%4n zQ|da!+PE12N5IudS)JU%jECSX)WJNUiMjSNX-BbetImFph8-{kE>%c8kk2 zQhqYOIOfl5Jmj#lOf^s+K14uqWd$yc{&0i){I5Z0^TT}Nun`GqPHPq6FljFQ7O)c3 zEd`290Snn|zM9dTwaDaCnwL|0CU#q(+oy#0|8@PJiLKjPK(h^ETxXX83`0~Ccg&)6&) z7xF544+NnPaCpvAbT=A@gMO{PMJ8?yFJ!(nMrDo^L@|mQF#PizW#^)Lqe>N4Ns}Dg zZE;&ii>sS@l^(YBlVW?L8YtH(D?9BwmMs69o&S=bV|=#d%fA0HnOIiq{Wr_{djIzq zd;bUL6nyIo2I!~|czm8`2ee`)9yr_4$HrLWiRv;InE}oZx2)5|hqj#Eh--|{R3*PV z1EXYdwF1E!&Db@YRWFPMH70Bq8RjH;Jm-%B?aXP+DXTMyJK_}~?sln%;l}bgp$GCQ zGMLEdYKG6)?Tm}Vj621nWDgpt;wiOpP3#43Ermal_FzZUwBoh)WIOt&H$?4(2^Vf&tx#XkBJs;cnv)1%Qxfpz5{Cg$}>+DA{A6oPLkychOR>324=HxC7##g z5EW6xMjZuHT+f;%SE$UdVvxZwB>2k5%fnYEFN<0t;d`U1bnMC)n+CV_hx{81l|_-C z(7iO&!6 zv!)&%pvGvVRPK=9;qL%Gz&4*I_-NQp{V^f(8^P^=T)}{!A^X4OBRyJigGaT1tZ+wV zvn954n#o$LI?7jg5b5>NTTaqnU1hh^>uS6`9+=7$?P)THvCp4HyDNonQbt3m*+O>x zoGn3u*dmC_S1#o%n|pGzAqY?uWBqsjZ0KWctc#|FaUs$m$xumEahACgp{F106we9p z4826Fc9O_~a`Ymv{8iPBwQ^qX(U`8D@zN|f8o0eo_rrflyTb(c9F zj|G8UGQ#%~&5@>$bPq{G!*Xq!^?D<6PQ@)`7i6^zF-XN9S;dDwLYPdu}!(iqj=CTW|Y3Vxiw%|!3Qs#MqV^4yKWlVf)qJ@k()X^Nvp^+O>b4_c4 zjn>h2s-<+FMJ{#k&y_@s!iniI-*+|FHg(VQZQb?YlWDG{)4+D_&J}Q@g%dr}?fY{h z5&a;vG|dkT*LVCV@LX+TGC#6G5H(j144ZRv%syRk2kWwSdi|2e3@+;@&yg!?2LUy# z`*S4`Bg3;z%XK~9VV)n_w&&9bT4>@kPh%4|;9R{$|MLQ$_#~HTi(<(vh#bc-?q9M* zG(bvpD~LkfW?@J*n+B0>nwA&nTI2&SyM@XD)v=|@F@(wi0#fA|Bh4G>MqA~y#h@XT zEla;Ds+to)OX^z)hd^|7d!pIQXBO3s&;fDrY|9Us=1q(+wB5>}7PdDAj!-G`m5xW^ zMH=A4(rBPNTL`H{)FK?ohT$+9G$SzJ*vtz+5I{M)uI^jaPBFIZ6kTRXgwG+FX4eDOyy=MF{&?X@b?_W8I=-QFZz~FZs$D#%(ion-G-|`HYcMXq) z-u9KdwGJI#cx{Rf^oeG9TTIac(VZ~Y4nrdhJm{rBw?da{hBu*}P4x+wHm&KW7Jon% zeABa8q}`tdiD>!0WBPh%d!DQ7p!4hrG_U7-j^{ZM%uN3T(OEAmfe|>CyM?SsMAP>) z7z`0qj5@ZbYcP(@!0-$fxS<}J>>sYpU8cqL^#ge|18oa|k%(X@`H{(VNAo<-2KjNA z>lj`LQ$fki8@ zm~MvupaXHwcGWfCj;vsdfhZ6)!}83?H*Hr7EEh)u*kigeVWwtVhG!bQIwCeJ3{*j1 zs_<-2H8IQ$=m&$J!(gJ*wiuTZG4w#8gJZ$KJ@W#e8Aj;olL>P)MhzYGMoTrQ=xKfO 
zfmD$Qg9`?`XKmx=2t*os)bRtbUUeEdLEwSO%ybs|5z{PZ;@P{mTatVXK$R%lW(;t| zz+{eTYRsdd=DSRbY}cZmt{Z`&1!3qpP2-`y4t7bzk_9&%afQkG9+(2LHC$^8#Ul}^ zt812SLpy*D@+0axI@*&aw3O+BmQ~}S+AfulCAJGlU;q}-7DkIe^i7vJo?);FXaghA zT{8lI)S37;UJC#gY)yt?T=92Z$WN!QGCq~LCUo;QQyE9JOxtn|Cjd7y3LMv(m=g-> z091kDf{hLoG=%(v$VZBSl01Pz~$_rh~n>c#py8)O)+5}|O30&XM8uFR4d2-E& zxSc_8zEfYKkje=or1c7Z)C$J}`?SaBZJ4T=I{uVRM1<6VY?uvRz?)GQsDHFVKoKrltmPGdR~x_&FBre zRHv;j`IUGu=<;|bPrHo3B#EzK2#U)E_>+OS6b^G6B!Y07_ZLpHLpbGR3BDqC9Qm#S zbSGbmsr*1pVooj=tT3Wj&^q78er3dR+?01s4(F>4b_&8 znpsi!+}y9_0S@Nqn!Cl^DG?`*4T^9w2`6soS+?U-aJkX>_B|LtFq>Ab3N&-FWRCXZ zHTc?B=viwZ;LbKdNr~uCu*MzJg|eAz;jH9>9ibVvufwDn_~!Noda;1LE~ffeL>5ap zocDFzx1B9KZ;2TBQAF9q1VwLqP#rs9nr>RI1yisUO?+XDl%oMuVVa^V#D~EQPnMP2 zqWLY%agMrxIc(x@G5JVD3nqUPJh;FGGZvTvlUxgpz_%M2R;vTthfEGb7)SLekd98sR!z|0vt%4fP#03Vw2xE)r%z=BShk;j>cYRBF*Oa{LrMzn+Q(HS7UWrZKMRoapc$j+_{JtR{5}#XY zq2X%Vgzy9+0}CAtB=CcDF9=-6fx+$To{8>~7tp{ir5&}Ae7A&D_l5B2zNtITHc?%H z=z~?}S>Pu5%yk3s2_||7LhgdE1~a2+u$D{RjzZv?Q#5fMdz&>Ufk+KAuuQ`Q!_2pV z8}Oesjro>_i-4xKTwZvNpT}B7I&TXVZB3gFHsO@7Q)VIKE1fM>Fk|to8V1ooa=yJ4( z^B4?9xx4cG5}yDuyvhAbm56T0OdAw1)GZ9XD4f_6k2$`h8DOtZ{0TSCFfM@C=0bVk zR~vS)MR^ht27nIkCyalF%Lz<3JS);{-#2WRI-X|y5>xTSb#1Ukwum+fM8neDzy+Vx za$sU+cBr{NFztdP8+f5*X$>ZuL0uf3k>S#C3-?7LLi=boSOS(Ay3iMq2m0Crk0uB} zPdF2*H&`fkem*7vSXDUdy!h$(`O)#q@y&cDg2}!ghHRT)GDq~OV}iwM`3!tXaPTn3 zs`=1=)D9YzNdNXv|3n&BMT4L3;(v zU=60wuWuv$PkR5UI2`b$-+$69@LgK(KY@Pwy8rLX?EkB&3G$n4wfyb*!*jWvj{JbD z9~bAK21oM5!YI4Gy0|!(f-_pBvy0#CAy%&kc=bh;3$<`Hist})QT|3WgjYuD#kpK> zDInEv1zDJmc((z~S3n$+GRY4AT7q7aPVnnd4MqM`Xs^V_P)tjF#5!1v(d)|RIHSgk?`xXUB2p)svF|Ow#viL6z$kv@AC5U zL&ooYscy-x-fdMAKXuC$%`(PM>=$4ng+DF1x5s-F$bjiEO~R}vGS|J|olnQhbFowhgIJXI~o@f;%WHSao`po z=;|qc#QR=@!7h37+vc00Wr|=)>?Z7de+fq7C-xE&;uPC)x|u6GQ99 zQ7+DQNxFE~!cDRg7BD5t1jA(LJr)4hs~PSa^W#}8E&@RIp=m(#R+a01Q4Un#L&i6S zE90l8gLAzn=Oy2QS!eiG%wBzJy9eeTB8vG@ z68wrQ=&Yr7H{55&wP0ZyQ9BjNu1CJm) zmE!lL(U)~YsH7v)k%_BmcW?SBOdHcZJV3p7mD1%j+vBY-7_3@UC0Xz@$9EvzYnXCt z8wh%TnE>8XDCS9h^p6w?ArdR|!(x(0%OnV=ylKt=Q52WZ%B28 
zEC3C>p)CbKi4lO@C-WA)+SEuYdo|{{A1#ukLmLx&Q!kk%eRc literal 0 HcmV?d00001 diff --git a/cli/tests/testdata/npm/registry/get-intrinsic/registry.json b/cli/tests/testdata/npm/registry/get-intrinsic/registry.json new file mode 100644 index 0000000000..b09a472f65 --- /dev/null +++ b/cli/tests/testdata/npm/registry/get-intrinsic/registry.json @@ -0,0 +1 @@ +{"_id":"get-intrinsic","_rev":"8-56a236fdf4f8cc6cb2833aa6d5ee81d7","name":"get-intrinsic","dist-tags":{"latest":"1.2.0"},"versions":{"1.0.0":{"name":"get-intrinsic","version":"1.0.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"tape 'test/*'","coverage":"nyc npm run tests-only","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan 
Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.2.0","aud":"^1.1.2","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","eslint":"^7.12.1","foreach":"^2.0.5","has-bigints":"^1.0.0","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.8.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"gitHead":"516f403fe75287a2a80a8d48c2061f6b3238ec0c","_id":"get-intrinsic@1.0.0","_nodeVersion":"14.15.0","_npmVersion":"6.14.8","dist":{"integrity":"sha512-EMuu0ud8uAP4Zs6tQqMeHiY1PbIBDcZ92QVxqeLfqTMbyvqcDbrtHjfu0RWh8QaUNJ3lP1DSX3J2okgj9JE47g==","shasum":"035ccf14a00ae2eb3d110a00fcd10e74706a8fe7","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.0.tgz","fileCount":11,"unpackedSize":25104,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJfnCtMCRA9TVsSAnZWagAAwm0P/0h8E1pcOVKKP6XQ6No4\n9tdWqfwRUlG8RTYs8sXW8g2qL3PxQdM1ql5GztOTUSstrtEE2sux290V6w1B\n829I8YHJbw667RuqIOuUBnXjaFm3Eb6S1Tvhvlbff0MtEoP9dZwgvqHn6yLx\niIBIRDCEJhuqrfVmjbpy6hLDEsxhaWsSxPj81gm+aHY6xVb4f/dZvrDp8R9j\nlaEwsE7EK+cEn3ifTQYYHlv8an9QkPFTHDLjeZ+wdWBnut+tepMeFM+ZjG+d\ngdTg2IeNfXFw/QSU5eDQtjqHZ2Fv2T4fFn2blhkrIbEMmwxczzM6QuQiOGc8\n1suIs9vDdt8qq6h8ESs9hr5I2hgE3M4Xxt5ziZ95TifSDRNyyQGbMy5vj3CY\n0z2e5M6zr5b2mkiWm0A5tZI4Mdy/2XrpJxTE6/opYgvA5mQ0GIYzO7r1Zt+G\nmHD/MDeTe2WxBWizo3nv0IGRvZeHZ/JjcRHdHeRAq+rqJ6o4hvYanxfoGlGA\njCUXYsZzR2XLfxBiTeSUO9VQ5YSBtsfU+egeRNwOw5PwxpGwfW4VUVOPHwHJ\n5dHlRGuWHDOn+4uF+09o5B70By6rcGZsHV62jX5ci5JclHswBdrvcftucfyG\nyR2qyuEnxq7O+S2D/uMylQLqTdCdJ6Bf58TKGSzpsp45oWrSmIsSTdiVWIsG\nz7pB\r\n=DQ3f\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIEehDACke//ohQCAy5pJo/R/9J5UGrufkNBiQJqe3y2DAiBC7txNPrBmQB4PjK/Ydow1627eRDEIl0wz1IkhWkBAkw=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.0_1604070219549_0.3039159077605891"},"_hasShrinkwrap":false},"1.0.1":{"name":"get-intrinsic","version":"1.0.1","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/*'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.2.0","aud":"^1.1.2","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","es-value-fixtures":"^1.0.0","eslint":"^7.12.1","foreach":"^2.0.5","has-bigints":"^1.0.0","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.8.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"1802957d1ff6a04965505f54c3d354ad7fa31034","_id":"get-intrinsic@1.0.1","_nodeVersion":"14.15.0","_npmVersion":"6.14.8","dist":{"integrity":"sha512-ZnWP+AmS1VUaLgTRy47+zKtjTxz+0xMpx3I52i+aalBK1QP19ggLF3Db89KJX7kjfOfP2eoa01qc++GwPgufPg==","shasum":"94a9768fcbdd0595a1c9273aacf4c89d075631be","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.1.tgz","fileCount":12,"unpackedSize":26012,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJfnPbUCRA9TVsSAnZWagAAO2UP/37xSp1p1f50Pt6yyL4k\n1BKcgA+OfPCEhFnJA1AKqYeL8rVBLr7VoSvMzTQ9JonFIIXLlnlVe8P91KWE\n1AXoYJr/dW8ZG7vHs37jK7aiEweyYlgLebWPOM2T2bU0WFoaaIws1fa+TwTS\neCqY8Q7XysXV3syWXX1El/2TIXzSVa8g8gOVJy/j8j+fthSAPD0H6ZTCvYQ6\nPSWIFAYhRIWXLGel3T/TE1p61AWZuEtf8B+e6K8hPiMuzhNjODCBqJQV246D\nPznhAbJV81wNIdM0ohuT19+t7GqjjKbKKMpU0LZzSCjZF3Q+zLI4H+qMY0Bl\nHFiqspAfS0r/wHWoBkzODoHWMduJ/JPtE/uee8ae92iC9fR9Y8fSOWXTt07W\nFWSGyLyJ6CQS7d+dJwFb+2cQNckV/9VKu+y58z+i6x6/FExmHNBdYt4ps3ju\nH89DQEmfq5wyLcceng9K0a7A6vfLM6MvEk8FugXVhGORioFOkscE3f8gS7Sc\nIzbl739iiG3oGvNzRgF229t2xwUZXVNqGJ4Sg3AQM/RX75+Mu1Jlx52z0ECY\nLZGX16A+J3N955DxJktRA1l7RA+zihIs1fZKHm+fErP547biV5p+TNocKrrn\nwghypBweNbcNkzNds6qczoB/3Vsc2OxHaVUBWNleqmlrQ6Qk1AA4ZRIdhQZ8\njWu7\r\n=BhVO\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFzb1R9CMnRu3GNwT893R3yms0wnrxROjmNn7s1aWEdLAiEAgP3VkUFew7/H+j05N3mW3XntPRU+Smw2z+q8kZl3CC4="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.1_1604122323843_0.022947285149020447"},"_hasShrinkwrap":false},"1.0.2":{"name":"get-intrinsic","version":"1.0.2","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/*'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.3.0","aud":"^1.1.3","auto-changelog":"^2.2.1","es-abstract":"^1.18.0-next.1","es-value-fixtures":"^1.0.0","eslint":"^7.15.0","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.0.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"eec980691af2fafb4e0d9207e473c9e1eb7995e6","_id":"get-intrinsic@1.0.2","_nodeVersion":"14.15.1","_npmVersion":"6.14.8","dist":{"integrity":"sha512-aeX0vrFm21ILl3+JpFFRNe9aUvp6VFZb2/CTbgLb8j75kOhvoNYjt9d8KA/tJG4gSo8nzEDedRl0h7vDmBYRVg==","shasum":"6820da226e50b24894e08859469dc68361545d49","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.0.2.tgz","fileCount":13,"unpackedSize":92891,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJf3FiZCRA9TVsSAnZWagAAhqwP/ApVcuwN67ClrOqU4sXI\nq1LZjlVXwkM54mRbfqChOsZUZsxW1V8xCpeaaZE0h2JKH+PnzS/GUvAcd0iA\nXbyjsIfvc66lu0bBIMKrP/zLMQ7LIm3q9Vr7iLIWi7LXXCGNqhtNx0rGgPVi\npk6c0o6MUK6Tr1RGtdpQZVnJqF9veFC6RVApu+xFYt8QWXuYDTGKrS093aRU\noI3SmbrjLSlskjXSVREGFja/L5JsiHbds7meSHPWdF57AhatrEb9X8h93fdy\n4Pz1yUKjd1QFXoAg4Pw+TLRPO0VN4JYeHWwaQ+mmOl5RViz+Yiq6joR+Fo1r\nsdSWHdijgx3XzGH4nbiP9mjR/TcypqZQeEP1H5TDZfDSSRSg9Eus0BQuHwOa\n9kNLDQywTsBBsB8S5tlJ4QSrTSn6Y8q5RsQIl9IIHwAUW/0GyiAUfCJMYCiE\n9A13GnS6ZPJEdJu960P7ZlbvnfpPbiQaMOMyC6kXOfACBkcxhhc4SofQkMZw\n5v7Xjg3Nz6inEnpbXuuU3Tj3WmDMMWoyX06sDbv50X/gzciNSy6ptcJgultt\n8aGrP+i/QWfHzGdguIVlz+2wf5kYG1jRyvbKsVZKJ2wvnnxUC8Ji0yjFO6j/\nKuwcsva60yDyaAjpy4Sbw7WSE1etufVa0rXf96788xqPhAhvFcGzzVKiOigr\nUP2R\r\n=SifI\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDLO/WsMu1yogrwHBInw7hC3MUpX9G+E/a0CiaubHBZGAIhAOSWmWgEOV39qmGn7YkvbHgzc+IxhjCPiTch0LAig1NA"}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.0.2_1608276120877_0.84071357918607"},"_hasShrinkwrap":false},"1.1.0":{"name":"get-intrinsic","version":"1.1.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"]},"scripts":{"lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.5.0","aud":"^1.1.3","auto-changelog":"^2.2.1","call-bind":"^1.0.2","es-abstract":"^1.18.0-next.2","es-value-fixtures":"^1.0.0","eslint":"^7.18.0","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.1.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"aaaaa0d5cd17d4b0b274cdaa1f7f3e6007fc9e59","_id":"get-intrinsic@1.1.0","_nodeVersion":"14.15.4","_npmVersion":"6.14.10","dist":{"integrity":"sha512-M11rgtQp5GZMZzDL7jLTNxbDfurpzuau5uqRWDPvlHjfvg3TdScAZo96GLvhMjImrmR8uAt0FS2RLoMrfWGKlg==","shasum":"892e62931e6938c8a23ea5aaebcfb67bd97da97e","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.0.tgz","fileCount":10,"unpackedSize":29482,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgD0SqCRA9TVsSAnZWagAAVjQP/276R/hJLC3r7gUvPhUj\n8mwJCVpdCjIzaf4fBVvu0mB4cXe4G+t+N1w3JG9wkBqTpoHjNRzUUxhcFYeX\nnos9b4CeNftDFVgwixFHcRS0Nk0A6SUSj7jdmLiyrM3Lc0KVrMfe7G7ECeSV\nKGWViXtP8oEZJ6FZURMS9yMraQzeh5ChjcGKXsX0Jf0IpUXlDaib0ElChkLr\nN6iXsGveM9tYf15JjBW/gyJXhMPQLGE37jdCBkoW6WeOT7twWr0KDcmn6QHg\n775CZxPl1VJpEiXIoSk0PnAxRN95MIRZvdQ9k1ctSuE5kpErrTZk7j7i4i2T\n5bbOOcLvxX+StCvNtOh7M52RyDxPaagFSoKaNHxmW4e2muDDuvWRPA3n/FI4\nuXw1J1Lb1lvbhx/L9wLNN9SdPFcFOA2+t23SJE/F8abLHNsdhoBlCyoCmULL\nIKdrVXWxFbopnQF3n18ajCIDJ9E4J1vr6XU7+xYc7Pl1Nuel9AfQU5PuLAFy\nj0ziiUntUOuYWC0xHuhnYVHDWmU+1UB5IoxlQi9uAYp0/RBWg4mmAcQ2dK9B\nJDxOaa/Rmkp1F/5htSqD6hvfAH8Pv/SpEglGRUPH4mmHF183iLEwls2GfOAJ\nh2Baw9u2yX7COfPYqDG2MVbwB6wafDPgUKNNCZ+FjMRgLCt9VrzwVjPu9QF/\n5V7l\r\n=QaDr\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIA+e/mSh+QgkqBLYqQTWcVvq5FQ05WbdMfaLjOMgM3N5AiB29JZnuXxh8SsBV17yu9nizQr5iwkWwK5HPPpe9Dx8Vg=="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.0_1611613354056_0.8648044903277086"},"_hasShrinkwrap":false},"1.1.1":{"name":"get-intrinsic","version":"1.1.1","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^17.5.0","aud":"^1.1.3","auto-changelog":"^2.2.1","call-bind":"^1.0.2","es-abstract":"^1.18.0-next.2","es-value-fixtures":"^1.0.0","eslint":"^7.19.0","evalmd":"^0.0.19","foreach":"^2.0.5","has-bigints":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","nyc":"^10.3.2","object-inspect":"^1.9.0","tape":"^5.1.1"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.1"},"gitHead":"efa0daa5166f1a06658001e34f49b5f1185786eb","_id":"get-intrinsic@1.1.1","_nodeVersion":"14.15.4","_npmVersion":"6.14.10","dist":{"integrity":"sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==","shasum":"15f59f376f855c446963948f0d24cd3637b4abc6","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.1.tgz","fileCount":10,"unpackedSize":32513,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgGroRCRA9TVsSAnZWagAAAaMP/1kYGifz/BvcSYhnlVk+\nSnCwbyuOaTefaIpH15yyWb9sjo+1fgUw4Ej3GmVdpmyW45Tj0WePwRWhbpok\n1aKIx3P8/q8m95HymXcR50VRByFyxpNFxtWuo674yTzvYxN1+QqXVSO7xeLI\nL+bRYOScvb+f5DI8t5LqhZlvQgfiqyWXZI4L+gbwfIIrE7EUg5DZJZrzIBOY\n5SExvgueChcIptQgu8ppE5kADlGqmTHUBt3P68EU5HRc5Z/LN5csgTu63VkJ\nxx3pTXa/Q672C9qj1CqedmughzgkfBjSuKOhbQWgILCbNy0A6TKKVirpc2fB\nuI0f4vWTf1ImGrspsfIH2IR4SQqMmVy8qpgwG/YtU3q9Si9pOcXQ1q+JnyD6\nDoLaiTEVPC8ks/bKGjtNBDUmlnEuyluaaFuK3cfJQMGp2n+FNLXI5LBz9uoR\nkpqUHNJBFJ9HbbMfBUmTS3K3duAkgOR+izFQgAJJWzYbuAvM7GGAoy1eQUrY\nuD1tAQglMbB0YwsjnDxvGcV32iFoMttrcXb5xKUOlVaFMD2D9PDryeO/gu0N\nm3wDWCKhmMjGNWV6WA9q0mD6YRCPHZUwmb4xSFdz/i1MP4iVjVKc1tz6RAiT\nLqxKnm4uPjTsPPGrXWYdRs5EEF2/QHcmCex2kwk5Ul4fsVayaNOAzB3F+iSp\nbWf1\r\n=5aSN\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCNo13JEcIXzNTEsjtVSMSsBL9CAqU56ZzTh56ilFAwmgIgCFy2IWS7fXyDYWF1/aSqiRCTW9wVIONaN0YUk7J0diM="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.1_1612364304893_0.18784978138621788"},"_hasShrinkwrap":false},"1.1.2":{"name":"get-intrinsic","version":"1.1.2","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md 
&& git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.20.1","es-value-fixtures":"^1.4.1","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.2","safe-publish-latest":"^2.0.0","tape":"^5.5.3"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"1692762305146cdee0bd0a31cb0a57ffd9240c8c","_id":"get-intrinsic@1.1.2","_nodeVersion":"18.3.0","_npmVersion":"8.11.0","dist":{"integrity":"sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==","shasum":"336975123e05ad0b7ba41f152ee4aadbea6cf598","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.2.tgz","fileCount":9,"unpackedSize":36671,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIGk690kzjXZ7zcAng4wWyvMEdoQ4xPaEtBm2SQIm48nMAiBm4P1A9nW2MVt9ngQfwiaKLc6wAZZBcdlpzlq8Br1v2Q=="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJioLxOACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2Vmqs3A//RY5KjVotePADUXDnTk/obYp33AmFt7aXnVRafoQNIhvDC7Ya\r\nMj9g+8NGAG2D1xBgD+Q/dhvvfZQlpuLRzkfQg4V92liFmgpoEB0ue6BP0TD8\r\n37S9yioBWG6LTJkqbvjc68V3gi3t5jWTqHJeYPW4mxJF6MMCx7m9EYWGtqUR\r\n0AnVNqH9j4SJ/X3qHach0vgsI8hnb8iXeTg8X7465MmQke+tygQbT3rYLN/L\r\nSni+uwm6EMybJ8Lh5GKq5U6aKr+inAYm/h47js4D7/A+tvfzYfWvLjr1l4J5\r\n+cMKLskFEP6g/Xz9jaYCCRxe7YGaiTmH/sUgT+kTzo2oJaYh6xd/6bgvGCut\r\nPFBBxh0lknSR1wbiQz3hcdHu42D0a9jiOmtc3DlkiRzrez6pEJMDnKu+Pbck\r\nkqhrBMLYyLYkLHJzeB07aN+KuspIZgjMJ/rSsgqla8JHv6TqWx0BbaoZ53VA\r\nPzf3fs73zh7IhNLznCQVNHR9iM0w+dUMI2n6c0QlOaimFkJ+61cHA13zU20x\r\nllmejv9s5XEkvuVSU/ibuYEkbnqDg62sWcm0HgGuL6k+RKe7Mj/gOds/Zn9n\r\ngvMIl1y7zeaIrEmQHfj6ndXAB1Mv9eIySBA4//nd+oVZLibt4pkAJIIy8xY1\r\nnHBGLFN4jrtJBI6I36xrNfrC0DVumPnTh8A=\r\n=ZCcH\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.2_1654701133878_0.7209001100988714"},"_hasShrinkwrap":false},"1.1.3":{"name":"get-intrinsic","version":"1.1.3","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.0","aud":"^2.0.0","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.20.2","es-value-fixtures":"^1.4.2","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.2","safe-publish-latest":"^2.0.0","tape":"^5.6.0"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"testling":{"files":"test/GetIntrinsic.js"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"65cac0bca7cf7db4d1594bd1f7c68e921adedb5b","_id":"get-intrinsic@1.1.3","_nodeVersion":"18.9.0","_npmVersion":"8.19.1","dist":{"integrity":"sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==","shasum":"063c84329ad93e83893c7f4f243ef63ffa351385","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.1.3.tgz","fileCount":9,"unpackedSize":37128,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFx7EpcX7UchnW1MjTW4LY/IDpL1jl3H+M29ezR+WSHQAiEA7sXR/8EoSjeBOAK0Z3he//k1OtgvYgkt6hGAGtrHojM="}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJjIACSACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmpAuA//URHhCHAEO247jjMd3BcpIpNWgbXTtnIEpHqbGulxZuzwd8hD\r\n7a9fqywLrEQq10reRxRNeS6Zk7BSv9QqwtZx7dTmi95ZxPETZvF2khJ6ggyj\r\nhAMonDjaP79Ki5Dwz/JH2WxsefDcAAPRRftEmm73oSJgt9EEdssmmAXgG5JS\r\n5OU3tCLGb4ricSaPNv2g2QDDLuLh/j6axKGn5bsQZFCvK87PV1vR/9Q6EVUz\r\nNDgWOxcgQTXgpVJYPsd6j8FiB3PiuFmd7/aLiqUMncStQDzklRHd8zUcxay3\r\n+0NplukrzPQPRDjMLuLeIX6WX+145sPZcThc7s9nrfmk2ODpDmLUYPZdki6U\r\nUBBa9aK3kDBIocvwVrleIzyY53SKvmmZ6jqmP5wS9pEWPa1gdD+VugZGazEK\r\noYK1MH77WG9fJb/2n27AWhJ/Tm9m177G+9rYQKIA+Q9JmZom+qNQviXkSkHL\r\n9MhOdjGzH0hnhX25ml81l6I2a/spKuN6RsHKNruUEUUxAyQYxIm6ZJs6D2Hy\r\nDjd+LklfZnCUsJUIJarqkB8XnRYsrKR+zrcTjxuRS0vQMBs+t/DYyXaS1k73\r\n4SR/biyt43/SOVtwZ25ThMxfGBZ+gwIqsoih3Rovs18QsrZNDyeU3fzfcTCM\r\nRioeF4ejfq26VnL5JSIEvGDWBNihLZhCw5U=\r\n=Yum9\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.1.3_1663041682205_0.23362607287463288"},"_hasShrinkwrap":false},"1.2.0":{"name":"get-intrinsic","version":"1.2.0","description":"Get and robustly cache all JS language-level intrinsics at first require time","main":"index.js","exports":{".":[{"default":"./index.js"},"./index.js"],"./package.json":"./package.json"},"scripts":{"prepack":"npmignore --auto --commentLines=autogenerated","prepublish":"not-in-publish || npm run prepublishOnly","prepublishOnly":"safe-publish-latest","prelint":"evalmd README.md","lint":"eslint --ext=.js,.mjs .","pretest":"npm run lint","tests-only":"nyc tape 'test/**/*.js'","test":"npm run tests-only","posttest":"aud --production","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com"},"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"homepage":"https://github.com/ljharb/get-intrinsic#readme","devDependencies":{"@ljharb/eslint-config":"^21.0.1","aud":"^2.0.2","auto-changelog":"^2.4.0","call-bind":"^1.0.2","es-abstract":"^1.21.1","es-value-fixtures":"^1.4.2","eslint":"=8.8.0","evalmd":"^0.0.19","for-each":"^0.3.3","gopd":"^1.0.1","make-async-function":"^1.0.0","make-async-generator-function":"^1.0.0","make-generator-function":"^2.0.0","mock-property":"^1.0.0","npmignore":"^0.3.0","nyc":"^10.3.2","object-inspect":"^1.12.3","safe-publish-latest":"^2.0.0","tape":"^5.6.3"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"dependencies":{"function-bind":"^1.1.1","has":"^1.0.3","has-symbols":"^1.0.3"},"testling":{"files":"test/GetIntrinsic.js"},"publishConfig":{"ignore":[".github/workflows"]},"gitHead":"0b60d7ac9d93e8824a36ddd52635be1fc13758d1","_id":"get-intrinsic@1.2.0","_nodeVersion":"19.4.0","_npmVersion":"9.2.0","dist":{"integrity":"sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==","shasum":"7ad1dc0535f3a2904bba075772763e5051f6d05f","tarball":"http://localhost:4545/npm/registry/get-intrinsic/get-intrinsic-1.2.0.tgz","fileCount":9,"unpackedSize":38691,"signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDb29OYVbJKfex+ljyYg1fRxZiHvAcbeMgBRIcq6cP6MgIhAMPAotqdPrJxkwnAeSq+RDK//aoFWESiSJuvWBmlhUAH"}],"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJjykKtACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmoUEQ/+PfWdGnewUZa86B0H4haSxBRBlwuFGg7GpdoEhJ3Ll1A9p3Jb\r\nvU2+9RyeNp1p2LNrktOAenAcs7I3dLl0dAspHjjL3uLNhPkrlpeVqOktXs+H\r\n7l0VaAOrLZVi1f+akY117IO0OO6FwRLV42VdM7QIH2BcfXuCyDDke41rq5oS\r\nR9I+8C2SCW2/OxXcMG9nYOpW494hmHRRYh9mpovJUOpAerMUgy334rK72ArR\r\nNsgnAu4luu/7RmC5BNPS26Q7NVCVf7THdx2v3OSkgFvTrdS+wu0NhqkakppS\r\nfGTYkR1m+7vX9YLHIokoIDjHtHaNPMUb7e51OxegjtPEh7FBacfRs0bxfx7Z\r\nJLhYAbjSanGci/gfC2gT1YIPUgydWbx1Ejmol9j7QmA9BQuHSxHu+SiaRA46\r\n+F/Fzbkp1sC0gqo4qGN04Lw8+2g2DHGfBygd6vcUtnaHMz2coCF4rlvcW2fN\r\nz6tT4pcE/AWtC6l9yCWzAWDjEZjF2kBycuiY36IlhhPjtj3qiGQqnTPLL10d\r\nUWA9ZTqFH2k+o4tKhz8g1kQeBApgpRgr9FfukaNq/TZi2tguQ2MlHQ+0R0ZC\r\nZZRtnin4nEpjZ+GkAcfnm9QCrripiWwDtSgXsKvgSICOdp9urrSgfcEAuEvM\r\nrTjOosJAUVohG06+klaUIe6mIssavg3AgjU=\r\n=CxS4\r\n-----END PGP SIGNATURE-----\r\n"},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/get-intrinsic_1.2.0_1674199725115_0.9427568240984563"},"_hasShrinkwrap":false}},"time":{"created":"2020-10-30T15:03:39.549Z","1.0.0":"2020-10-30T15:03:39.692Z","modified":"2023-01-20T07:28:45.383Z","1.0.1":"2020-10-31T05:32:03.992Z","1.0.2":"2020-12-18T07:22:01.056Z","1.1.0":"2021-01-25T22:22:34.211Z","1.1.1":"2021-02-03T14:58:25.007Z","1.1.2":"2022-06-08T15:12:14.076Z","1.1.3":"2022-09-13T04:01:22.362Z","1.2.0":"2023-01-20T07:28:45.291Z"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"description":"Get and robustly cache all JS language-level intrinsics at first require time","homepage":"https://github.com/ljharb/get-intrinsic#readme","keywords":["javascript","ecmascript","es","js","intrinsic","getintrinsic","es-abstract"],"repository":{"type":"git","url":"git+https://github.com/ljharb/get-intrinsic.git"},"author":{"name":"Jordan 
Harband","email":"ljharb@gmail.com"},"bugs":{"url":"https://github.com/ljharb/get-intrinsic/issues"},"license":"MIT","readme":"# get-intrinsic [![Version Badge][npm-version-svg]][package-url]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\nGet and robustly cache all JS language-level intrinsics at first require time.\n\nSee the syntax described [in the JS spec](https://tc39.es/ecma262/#sec-well-known-intrinsic-objects) for reference.\n\n## Example\n\n```js\nvar GetIntrinsic = require('get-intrinsic');\nvar assert = require('assert');\n\n// static methods\nassert.equal(GetIntrinsic('%Math.pow%'), Math.pow);\nassert.equal(Math.pow(2, 3), 8);\nassert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8);\ndelete Math.pow;\nassert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8);\n\n// instance methods\nvar arr = [1];\nassert.equal(GetIntrinsic('%Array.prototype.push%'), Array.prototype.push);\nassert.deepEqual(arr, [1]);\n\narr.push(2);\nassert.deepEqual(arr, [1, 2]);\n\nGetIntrinsic('%Array.prototype.push%').call(arr, 3);\nassert.deepEqual(arr, [1, 2, 3]);\n\ndelete Array.prototype.push;\nGetIntrinsic('%Array.prototype.push%').call(arr, 4);\nassert.deepEqual(arr, [1, 2, 3, 4]);\n\n// missing features\ndelete JSON.parse; // to simulate a real intrinsic that is missing in the environment\nassert.throws(() => GetIntrinsic('%JSON.parse%'));\nassert.equal(undefined, GetIntrinsic('%JSON.parse%', true));\n```\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n## Security\n\nPlease email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report.\n\n[package-url]: 
https://npmjs.org/package/get-intrinsic\n[npm-version-svg]: https://versionbadg.es/ljharb/get-intrinsic.svg\n[deps-svg]: https://david-dm.org/ljharb/get-intrinsic.svg\n[deps-url]: https://david-dm.org/ljharb/get-intrinsic\n[dev-deps-svg]: https://david-dm.org/ljharb/get-intrinsic/dev-status.svg\n[dev-deps-url]: https://david-dm.org/ljharb/get-intrinsic#info=devDependencies\n[npm-badge-png]: https://nodei.co/npm/get-intrinsic.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/get-intrinsic.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/get-intrinsic.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=get-intrinsic\n[codecov-image]: https://codecov.io/gh/ljharb/get-intrinsic/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/ljharb/get-intrinsic/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/get-intrinsic\n[actions-url]: https://github.com/ljharb/get-intrinsic/actions\n","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz b/cli/tests/testdata/npm/registry/has-property-descriptors/has-property-descriptors-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..ee60a4f9e021ce8aec6a2b109929d89b02280cd7 GIT binary patch literal 3854 zcmV+p5ApCHiwFP!00002|Lq(7cjLCLzeaxr>g(#Hv*d3(Nt<@Q#CP%P#Cf)_U7zPQ zv_#oTWJwdH!%8LdUuH!|a`Ppy( zluD&Wtp-oQ>8aG~@L9FqD3!~NMhWrorCe&@?`O~O#3EuLit-yw5+BHZ|0K^lZ7&!4 zev~_gD2#bQ!!~~#anBM6%Kgo~nXzz^lkhNhIYDvEUe1m^Q}@`M>vI;FGsl}^PpLp% zj-|WYn?|z?;_-XkWC6BEm4Y0=DOMP$8|a5=B;k}cRWPH zzz&W|ldrBUCl|L1rvIz#9>Cieu_v{OG=gwAvp z!M=F_l~Sbw9Zv+@jNtDBO}paZ+z|ppH8=uhJmixFOhe{H+$w+_at^)?CMFD1UVz94 z<}D!LA$kJvClPZz%o2csiEK2)jAlqi_;z&9LXOB5Fd=-?VMq@w-;C$nix{N_U^|$4 zzp`#81!BZe{A*J&>6s0zxPAutbGQ#)&ScWCw}Y<&wFo%fEfMOagd8^wcR$A)~O5Z2>;Iw!3Q@`{2dt(mCSmC5j 
zyRFVe0nS<%t=DZis)tni8lfra;K%befrwuV|D6syy-S+LY438_$7TU%*&k*@f9wp} z1!(m<15(JVe($27kvefiPZGg#mu*FbR1Iri1W~B{X3)+E!dbi3MOp(IOjAseja_{H ze~$l-XYsejjd=d}U#>OEkK%u`(%i-W=XmlM=|IKE{MVXx$3j@$fx`)g(0|8H$oKP8 z9_dcnkv~YVzp%LNc>F5#F_1(H9I!fmKmV7W3{=KC+F$Yox#38u%$P{K&bTl`Cy4w| zATcU4$oTv;#{L0S6C4}%H{_7;y(nC$YQskF{=0_)a z`0o`2-n0L!Hy_u3o8{g9?~n5TWD2W(KVkvTzt>S`=!Q|uumx`5aP7O;|BhA}k>1Zg zO#_`l=Gyinx!@xMi86OTS_B+77f+8vZ<3Bf5_yMQT?QbR{y|i>Iou8E*IUr8AxHz@ zBLTEMQpJLA_bU5JOr@QSkhP|pcOY(^;21t9LJzCD5=vQ;i%*&ccD&R)seNxxPypsZ zTPlLWQxcUiruXi>$0F_fGCm$qjc!kdy*=Dm+Rii%*~H~pwGbcf;XeHlH1X=G7z%hF z;%+-bB+!HGAd5g3O@#*BY(AlYJYq>r?md}JB+`PbF#f^!+-K zm>;1OV0Oeq7>$$_j>k(a;oP`qNwa#$EKtTol|y{o8dx*WEZ(jq5t|13pZ_e`Z8cWJ zKv_oR0u-P+{u(g6t5Znbas=r_#|FoQL>B=I1=l4>k<3qiH-scf1CpTcBZVTDjzOHN zJZV}`&Hs}9;IJo?lKP6VJZ4zW7}9as8oPRa%ZJF?)chZc|H-dGW&7vd|5Y31$ML^j z-<|(F+xZWj;?iL+E`L4sGx|#Op_7{29S^BSNO)zVWRxT@MM^r(d|die0_5JE6Z#(2 zZ_(G>adZH#`=k#OaX=aOcW}-;i)y^Ix5|sB_f^Gd@9NBuoafAuEYp-Vr%sw=_MRS5 z8-!pKS5XkSX6g!K4I5bk;If~W~*b3K9SE|ce5 zg>_Uy!(A3`rc*@nmtHW3F!o@BWI9$7qJaq-`N^zPHn5YU-Z(#yueUO*ig0H?ReucWpubkMe2(mI%iQ|hX#bL39 z?C`r=vGo56W5twM9QnFAW8Rdz{uI9a5~fZB%(8%rwy)dW-s@}{MEIkJrf}1rQ>k6o zF`95ozM<87S%1W)pxf}z+}#)Za^tLXY+!&FK5h$R*{KP|p_{b-I*Xz}92blDK8q&?N{ePIPhRvJQQWpL4I-8ByXA{} zKa{sNM#?G_zGAhpn)hg}tCy9fngP-=t7}iewk1b&giBK3N16F_K%DzK3WFQ(a~4kM zsNoxRk>qbvT(~aH?_0TIF)^^z)sc5A8hJt+BA`zZiehphC7E?{>OzAqIzvfv5>LhZ z>3x89i;fU67b&ZmKj(ou5&xA0Uxb`lbDnt4yE7i30dmiDmbYq>`!>ZS4^jyxar{>$ z^SY#EOTkt_tE(aDt);z+96?VGjl&eW^gkK3a)K}Yh=i61dZfVU7bL7)GO9*J!JhJW z8?uork+8Z^H>#;VttY0xR$^4F4E!%0Ld4w_bCIWlgtk__NU#QNPZ}n?u8!y zm>Upj{DhQ)@Ske`IjQ z%jV58?t+?CR%YC+HS64}vGQTftk&7VicCpbgF>*T$tu>T3!&(kwIjQ1+jgnaJYto@ z$)wzZHu;jS8!m z8uk&2(`+<}JLR8_Cns-~d81x$elq)nPV>m(jcUbe9Ga7(TFtDNEY_Gb%M)|boE(~m zbz)D+Ij|rQAZC8-T56x;`qJfdvJ2j<*iEzX$xb)N%_#K zano$t&C;ZFpnZb>&$bNDe*RmlR2z@Zf9r(5^Z(D`|F@YRe1m)QZ%3bx63+@RnKk9( z5w1Hu+41P&ZaN;15*yMn+a7;Yq*0oNB%2HnxBXI&3LM7@J2(eL!NAxSeGmRyM&kCF{WCQ8~)5zqX4&-Ix_ zG}G4%c?pr|gil^;vQ(se)dO!+jhgm6wGn&R{@Vk5e&VylyF 
zI?k4ALtQ#yTA#g6tLD~tEU5|H-DM;`K)mg>@`B3qH{_|+UO>ki=86DkrNFk5-ViGdXoE51|U7Ut2m?4NhoDzLCpe?yzHfel3Cv79JZdEGUQQ$jX^c`|N zSx-szl!P*_xO(H&B0KO+b7r7(xpg8W^kEBYHL5Q~`N+&SA+o!2`fT+dLw%v*t2dW) zx7t|D-RG+Rlq-+^{-aW>R(ARS^W^`-85eXVkf-@0w;(CY)89UD%r#vWQ9%cDD`h3e zFj95uu@Yhww>+czV~(8Sz<>H&cU&xWB8@usL^IS_MB+(^CWc832vuO5`lD62=Z6Ea*E;QGf;Lavsi|s_aDaulCdHgVoy=eQW4xyYQ%8Gg;Zlcfs z1+^-K

    j(fIojUAP%U&Rs+=y=>g2l3E)k?E0T$h@~-3Emg#O3y;Sn!ozvbuyJz?8 Qo[![Version Badge][npm-version-svg]][package-url]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\nDoes the environment have full property descriptor support? Handles IE 8's broken defineProperty/gOPD.\n\n## Example\n\n```js\nvar hasPropertyDescriptors = require('has-property-descriptors');\nvar assert = require('assert');\n\nassert.equal(hasPropertyDescriptors(), true); // will be `false` in IE 6-8, and ES5 engines\n\n// Arrays can not have their length `[[Defined]]` in some engines\nassert.equal(hasPropertyDescriptors.hasArrayLengthDefineBug(), false); // will be `true` in Firefox 4-22, and node v0.6\n```\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[package-url]: https://npmjs.org/package/has-property-descriptors\n[npm-version-svg]: https://versionbadg.es/inspect-js/has-property-descriptors.svg\n[deps-svg]: https://david-dm.org/inspect-js/has-property-descriptors.svg\n[deps-url]: https://david-dm.org/inspect-js/has-property-descriptors\n[dev-deps-svg]: https://david-dm.org/inspect-js/has-property-descriptors/dev-status.svg\n[dev-deps-url]: https://david-dm.org/inspect-js/has-property-descriptors#info=devDependencies\n[npm-badge-png]: https://nodei.co/npm/has-property-descriptors.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/has-property-descriptors.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/has-property-descriptors.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=has-property-descriptors\n[codecov-image]: https://codecov.io/gh/inspect-js/has-property-descriptors/branch/main/graphs/badge.svg\n[codecov-url]: 
https://app.codecov.io/gh/inspect-js/has-property-descriptors/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-property-descriptors\n[actions-url]: https://github.com/inspect-js/has-property-descriptors/actions\n","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz b/cli/tests/testdata/npm/registry/has-symbols/has-symbols-1.0.3.tgz new file mode 100644 index 0000000000000000000000000000000000000000..a5f34be724d2aa496c67044058ac56d94d6711af GIT binary patch literal 7067 zcmV;M8)W1kiwFP!00002|Lt9SbK5wQ-+!Y|fpMq(S}cN{xqJ2N-s z^^HIPw3tw&j->4PX6L(KHvmf1%XTzL=627jR4fAMMxz0A{~8Z!LfHp;Df)(}vNW$t z_J_~^F-_BVU9v76KQ`t$o^M)~@0;))zF6|_AHKq8TGg}!Q9eT>;X`JB|4BZ-bRO-L zMN#iOC3QI!-3~m9kG05o1xY)9&EAh_8SlvSa+(QD1)4`YWBRcNs;k9V?v-6g*eEC3HS`SqfU<@2I9rk5$}<1 zJAZiie6i1@ng96>`y!M79`oOFmgm3e1kS_!|622(X4!1|KiXji049G}m zR>_=FOp3A=CC>4k8mvUGnvTb`yxHjz`HX)@^*F=7@2eu;!Kn}Cgk@7MEd$Q?8D}~B zYf;rZoktfB)BN|H|Az-J21loZ-(mjSrWdTte><=q=Kt6Dyg4`{hba?zB|4oK#pI?; zFGn?b!uE)5T0Z$(QF5A-*8l=&&O4pAq8z7Hl@>WkD>4$Lh;PVcN%LCpE=fuuNRbc* zpy5(HyOzSUo9cmy8RZCiqv#W=*DXmR6%K;VH6?r!^#2m?#z5#dh?Il2jtg zI=~?Xxn*1D(#bL?`UDkPYZXi!Dj3PBJZXs8UE8UVzGEjqe4kF$Ie!MV1xU z*fdt;JjE7OPdlA6$f7YAUD+1Zy?IfCs7eIvfXTd9byhV3uRvm<6$bT!dTFaUB~}k} zC$CeQ5p)b#-*PhzsQ>kVoF2bA`*HtdKn_mH+mquT4qgsklAZlicm`kbt z?+*7*$lG@(Z;ww0Q2k3_dvtL0>I7;Tycryw8Bi;vk--n}Ku%xpA0A>=o&9$p{RzrP zUL3#u>Ez(M*JtGQ@!`t>Bz`*p$@afJ9H^?GQ7;bn58iai%l$X|-whpEQU@;{f z`SJAtld!&h`1j)M;P?oe@#6UC>;xXW(8`mu8R?G)r-LrpKRGx>g}gdBe$(xsI)UO? zG6A}yf#QOyCJQ|RMezCE>0ri?yd3Nw0C1hxK97kE=c_O3`~? 
z8SksF;{PqjUcvv4?LGMaukzUiFGrMr+?YG(I@ewD$3K#V{3m;=L?xeg_i*I^xj)$*(Te2YbipfW zR@HQZPGlEAgysj8Ns33c0iw`svF>iGVi0aasV9IBX4UIbIe&s*f5rbwQG;WB2GNIX zI*6s{s(Xi)VkcItys z|84gFQq;ZTI`2)&0&rn{(^F$)dxvm`{kJ`T+5Y>o{Db{}u>TMC|Ni#BWwSTU;Rn*y8mYe&IA7cz3hKcrk9OPDSB^Hc} zA`?`Ssu_p$O`;~B;|(on(_~zW*EkidAD(ikKBSYUm*hNs{Ja>%_kL5mt0ig2uI)AGn<-9)@INw&5@Iq9wVd7(2)vNk` zz+=A`{*(Uscfo&-<63L*pZ#F}e+T$kbXnm=mnGdtD&0Q5UC@#$YtdBx}|9^FnO^D~T>P z>)>vVP~sDqLg?NHoG_jcy{y4;3z>}SdQv^@_b=0WG>r`yLVb09u&;O;w5rm}yq{Lp zR8)N{kbF=;R8E@Xb~2lH*4pOWx8ff~d?J!e$kDNJO|ucjok{YI*2WrfHfmyZn5}C1 zKEqYBQUvpU-pF%49@dMd2j&kHBRSWG@d*8J4k;J#tTG@if!Uxt00;aURs;lFT*i`T zJ;9Urn!^=>gG%$7e(YlEG%p2ZBUQ2}CJPfm55^DTrdqRr*vfr^(ioY@FYA$1gt>Xn z+-5M2=vdrR)tbWhtywF$n$7$5rb@EY)&PsQIa_G%F!9{HkO{KeH#PH{m6_a(=!0sI zPsd=~K<8>)K&4%>d2rmNwU?q|WjdMJ%J$IhJd#c4ts-|ZnTvzVuvqz=+PI?GR5Y!} z;R-rwWs6zMWEr&4Yv)fZj+S=(O9#*YYWqJjK>OP9zZ1CLYW(k+5BTr*z<(=-KF4#u z(%Q2kuOMo}6Z&eB-Gcd60^Zvq({?<+5|}PV_uH-NVbu_I4rJ}~jiJ^SQGH#IYAN`Z z!m3L9f0|1V*6R+3ys&7v*WHF-K_EP-C{NW@;G1T45!I}qWSZQ}vUfX=(7*y9S5l*y z2DUBODl2)ms1p=Xfey%36066BRm=^N@ia+93B#f-8pir+3FB4~*A9s_dRiJKs@&rl zwpv{ULYSy|w_A1>mQsaQ_)A6=(NFq}qwJ0O-r}dOnzY+W@Qu_zV; zH`Yt@rXWU{`DBb>l`OGrXeA>g)Wv$eD$TH6Xz*J%*jhzo5Y;Q@T#Bl0%FrnX7BFY| zRC@C{2TifW=V?_J>;tYjDE3bBdLEoUDU zW7+ROtXIiFMIYQ7ouW8^I0Tp9U{dF^eCR~dOUU1MuKw+b>SX~B7||8r`k;a*t$o_IFWB8P zv`~A0-+_NS=yvp7&?*IQbJI*p^sFd2RHqs=Ei?VRxH6mOIPYsbfZVb;2GVKf086fm zQrO>L-kz`6ZJ4S_3`E@RtU--6=lmB26Dk@Hf;l)t1Dwg*|ZNUy|vc6s=cG$N5AT3 zLJ+~#bpK^Z*rmnuYJwx9tTd-ac74AMiQXKXNy{8hFRQ!x>0N+Uck6}shV!Txi;22! 
zd_VffrJ#H)v`e~rDJC%5Mb6Us8w&Kh3;G(JE6{}%{FiMSwsL3K^<1Tahcla`_SDn4 zZj;-LA(iYKmaRud)!2sTp%FHvHyIR3ZW@u{=}~&aWReAb*3cZ+IkhEG@oa`n^9&Ej zv^KG1*6L_^a}tX!obpi|;8& z)h(1vCzP*gW;z)`l(8lQ=3`l;yr*>r98u+uF0|T34xVy8{?N{%(*OLNn1bqu)Og<@dt2M9UnoLbqOycC(BmEf(*`Z!L}B^+X`yO17(bw}&Cnlnnd zcu}JJ*qMhIAo67?M2@>DL3$@R4BRNV`2?&hnT;j#$28ADw-Pl|F( zYg~zl(JV3z>szr3tP&ogO)-GT^H&@U;SDh`ymOPZ(s`P#!e^ID?CNDhFbt z;WJedK^!ZthD0$S=R?m5V>?WSpC~j)($}Ou^QdR}EM|-f#|$D@xF)4h5;$RO`H{1S z5|2eqFJ#0qM3gs_uL~G9NNot+Eizs`Bc%YuinlvjVFQTC+|Z0IXB#m!X~O-`v}4Z; zgvCS>1#T1uHjBN43y}n_ySJd&QlP?+1=J7m!-pgTHw}`Q7B_`3w~<7X(vU|1Gb1-h zJi+6@5piHcZwE98Vmmg2MM}i96juZ=@m!Xnf~UhS8Lq{}m;q2&{b?fjGvmZk^}FUo{zZY7l_ zP0WynA>-Toqbxnc;*#Z%s6|qAy>fjm_;(a5auOH#|6E`iRW8fiY2%CFVxk6LcS=5SUwr25GCOFiQ>@Y4&Q1( zE7Hg^ZO@Jp(`B|Pm={|KW4>@g*Y=qoo8YqM{@mW$pIePTx3xdFdZyj81LeC z-WOz(A6wQZsh~M0Y3;4z39a#QdRgM& zlxxUX`*)Hn+1Jw*z?MV10wYHH>lg0tFJr*REG%GAgTpBsH$xUvV%#C8VJv?#Ad6?g@ zVKYXR*3O5a8~Aawl^mMX^F0`*z6sxGEMTbGo(bO)7iLeGfF2hNZjyc^xPnUFog7wO z_&Ub-GjSnz`R8WWvV9wD_%>o`QpT(RkqdQA+YP862Vo2-L6|Y~ICQ7~1<-V}koLk?*i$8Kc0;OTtJW!z_`16a+Z?Y>8qV+-VQja&Z z-|dj0B8{WaihUL(wh*3U0!9RcKw*wBAG$MiqD5qgo_1#+-_u0~9H#Qld`>`j3m?pF zv{iR2(ipUnICdQQZs>!7HUq(_8G4`_;ejJ&+jn>4ejRX5)ZVJwG%4rY6~17u1BS$- zfLHw36%lC527nKk_OszA%ML=v3AdX+k`!>B?_!fcB(bAxI(hf;uq z&6sW#N@-Fjw7}i?6wD2H;ZTI8>p&<4UN8XP$HVx3GCflfK8{8lg%C;tsM~53DpDK# zF`zZ(Pyh>_83mvph)EpqIBp^wP8S@Z_I%TPimOD;zJ+v)XlqOp?D$sFXwoF|Krgj6O_V_AkYrBnwgr^VUcCKmMJ0^ z{ID7LjzxXfp$@pLz_qBgp{v%mD~}^Xqj5Bp^eU}-EG4Sp*cVJV5qR6J7L^pK zYeh+7S>SyDBT|o>(CRo2q9jNFKiGEE9ud=tv@W+$o0Pz?O!>t(tU!Koji_1I%dhl}r!3Q}6+MXBWGLxJc>i%LP@+h|Z6z*REeM#L zD5hZl?F1kx_n7P3+>e*n_g3Rs+wws{TLq4qG)NrZaRDRw;Ey6VVBGTk0K(hAGZP!; z-fwqDcdz<{!i+p?J2R_DEgDc;c(&=IyN?4HD~@Mbfe(;NxLiZ31%O9quBt>=g$Av! 
zULD(<(}=TzeZX|-PhkKCTl!NZWjqitwgnh|KMI2oqH=JmzUljbLP6{Nv#+V$2jRD& z4FEoyZ591%($Gg_z)c22Cjyjf`naaWTrZ4ijO%Z$0nNa-K=WR`JHi*l8aHE%yeahp zYHg)uO&VKKV1|+D1Ddh}z^ay@p$Hr&av>aHR0f*I(t}LEZB(OTnsG9s;N7sZn~0a% z;F-l%SVWWB4voOWdk|^}n&5&S=p#s25>c0fs0&e#GBuNrMEduIeTHf>#G`jZTq}}J zZ>Wy*kBUONy-0w$+-_iM($ERO)5O?s;QNvw6ySA1Y2bH6EUeXS27Nw7Z!Ms<%-FHn zR?}CL`Us2wx%mzQvlPWXcOfjI5%<9r!{FJ3+}4eJ^gnR~!MwMX|52p2MI9JUk(t0K zcDd_7h{hc!h9J`6K21#Jc$N25KGW}+;5MTbpDFv;z~xyM`w*FIwLGgx9ap$v=vWR| z?Idy_fb?SO2~bqX6ENb^hAeIHK;Z2-!B!wdlL}#mzDvOmLn!FM#1cL@XBaA`<(b0u z9Y3~MyWSM=i6 zE|3H)wjffV)Q%+l^J3ft^f^ojXz{~Yi2u*$KTZbwFW(Hlz~PU3-2V(#-~Z-$59dF= z()o{;(IEd(O(%ao|K?mDrXb(qO7z9KeQ|MaUHnHMb2}Z#Us%?!E*kjTOUE#lF6PhE zGP}U?&2PS3oa?Q{S;^+PDao6IoHqntG%^L|9u)I+<`r2}Abb0BS?ysGAv7Kqtp4ES0AL$AAF?fYdW3baQMEu<$YVN zE|WLa7T&P(T;Gs?M%3#hP_kvQ*1mexAV;`!qYqExA)v0@%Sj4|T;vt0<(H&>pXQuy z5N!U&zoqi`6p_xP!*>bRBU+PjQPqI2)$d$X>JBET(LjAk1)esq%WBhB9Y9f=!W4o% zJ9n$s*VhIlysxBZZEWKv;yv;YqNl=I&wRgEknEv+I-y7jFP&8HpfPEw6Kr7$eTRBoXyGKk{L9QJJ;#~zNAz0iYiG&W&H1I{OQRaeQ(8u< zxMr)GYn#TWME?#{XT>bu%9f8;`7T<~g+U_~DF-S3P!Pm0cp~pFdJH{Z=%z(~G3TPw zQGm3c<$?I)Y5OnvXNb~wHP|@?n ziP~#eNU?fT=Y8(h$)sRxWWdb(kk-Ix;}C9p*{9_Gp@F9BwDF&z$oj2ildpvTEQo`b z@t^HF55NESmGIx$NZ>h^_Ny(4UWmcpy%HJ0+Xr}{x9W7%D0xcG)r#RoXF}^z6!}x~ zn4AbG$kgv{;mZPmK_Xp%)p1EU%V2VDUYEbc`++E=kTHUIY~Fu zeP8sFbY8knp$%zLKP5|MwSyt@6r%i_pW`xx`L~(ni>4&`R+emzBKNQcB8*F;i_Ub) zd-9@wdDF?#7!dD--fU)i*c6$|Di8dOrg^=Y>Hv$ph9WJE)`Ee#xkZzzz?_1?s4vDV z1Zum(B`TXVx4=f}MD56_hMw= 
0.4"},"gitHead":"e4a5e7028c87d509902ff292f4da3ea45c7c50cf","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","_id":"has-symbols@1.0.0","_shasum":"ba1a8f1af2a0fc39650f5c850367704122063b44","_from":".","_npmVersion":"3.10.3","_nodeVersion":"6.6.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"ba1a8f1af2a0fc39650f5c850367704122063b44","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.0.tgz","integrity":"sha512-QfcgWpH8qn5qhNMg3wfXf2FD/rSA4TwNiDDthKqXe7v6oBW0YKWcnfwMAApgWq9Lh+Yu+fQWVhHPohlD/S6uoQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCraijP8TUgset3RP/0apBanH6US79uNIP6cuXhcXO2XAIgcC1SUirdAx9l8oZX/ALh1KkxopaC+SvsCad2NUwNfMs="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-12-west.internal.npmjs.com","tmp":"tmp/has-symbols-1.0.0.tgz_1474328796481_0.2780582248233259"},"directories":{}},"1.0.1":{"name":"has-symbols","version":"1.0.1","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"funding":{"url":"https://github.com/sponsors/ljharb"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"description":"Determine if the JS environment has Symbol support. 
Supports spec, or shams.","license":"MIT","main":"index.js","scripts":{"prepublish":"safe-publish-latest","pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npx aud","tests-only":"npm run --silent test:stock && npm run --silent test:staging && npm run --silent test:shams","test:stock":"node test","test:staging":"node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"node test/shams/core-js.js","test:shams:getownpropertysymbols":"node test/shams/get-own-property-symbols.js","lint":"eslint *.js","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/ljharb/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^15.0.1","auto-changelog":"^1.16.2","core-js":"^2.6.10","eslint":"^6.6.0","get-own-property-symbols":"^0.9.4","safe-publish-latest":"^1.1.4","tape":"^4.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false},"gitHead":"132fe9ce5c2e443e0570606d4568a242eb86b5f5","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","_id":"has-symbols@1.0.1","_nodeVersion":"13.1.0","_npmVersion":"6.12.1","dist":{"integrity":"sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==","shasum":"9f5214758a44196c406d9bd76cebf81ec2dd31e8","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.1.tgz","fileCount":14,"unpackedSize":15474,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJd0I28CRA9TVsSAnZWagAAt2AP/jan/+oerqF7TJJ1/7C0\nDib5YuePKj9dBimLNNxyNbDCo9+XGPPXoDd5OuGVQ8hePAe0pFxsenbtyT+Y\n+empPCZMrgUJfP7Umo6FYPE7EChp7ES7pPua2oeoKzMhK3xH+sBXj9MQ60Al\nunwIhq1k0idyeHT/9iJegP+wGF5pDe/EZUVbzt9r6JU4WhCNopdta+BZwIRP\nmE/NCdPjZ2jHbjYVJlT7b7uFrA4KXtRXtaJKhy97biek3xfgP4WLKvRaPTKo\nlTXtw/UXk8L42RYfJFlIJ2nyLeorwS6QObZPi9tB8BmIogvrSjthvcVL6DSN\nWJTjxpu43zbRS6mHK5nBAnXcshB/mvM9E8hTxrhG2jfSwAR3RexMLsOqgEsH\nTUTxRdB4Zox0nUD7rSahulvEtjl0bCRwo+oeuNfNDgf34sAjhIsaBMzH1rQY\nuTI8DKB4s1wKbth7YWUvacPay0+vvIbJnq4AMjoIDXezAKdDVIG6zB5rt5vN\nPxaAnRkB2htFP8MEHbqAvhk2ibqGdQvdVW2QPeTjDc7bXeymiXI93nrZyiw6\nq5alXDWN8ubB8A9A7HvKa+XU3dsOoYW/Ypx/h/ca17m0Gc0LwfJ2o0lC4sLQ\n3akrlFDjuBdlt2tWWzCRD5e7av8jJZ5C5ZKBeRkL2Xod4iyMFAD5wtefsXvR\nELxD\r\n=shCA\r\n-----END PGP 
SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCCFSo29lhXirX0Rs+Cuj11qyhheYjpANIVYXAMEcNrhwIgGt53b4yRyWBuEHwcnk8h23iA8dpWARLe+Ojp9kyWxO8="}]},"maintainers":[{"email":"ljharb@gmail.com","name":"ljharb"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.1_1573948860346_0.7408930604026625"},"_hasShrinkwrap":false},"1.0.2":{"name":"has-symbols","version":"1.0.2","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"funding":{"url":"https://github.com/sponsors/ljharb"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"description":"Determine if the JS environment has Symbol support. Supports spec, or shams.","license":"MIT","main":"index.js","scripts":{"prepublish":"safe-publish-latest","pretest":"npm run --silent lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"npm run test:stock && npm run test:staging && npm run test:shams","test:stock":"nyc node test","test:staging":"nyc node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"nyc node test/shams/core-js.js","test:shams:getownpropertysymbols":"nyc node test/shams/get-own-property-symbols.js","lint":"eslint --ext=js,mjs .","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e 
\"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"devDependencies":{"@ljharb/eslint-config":"^17.5.1","aud":"^1.1.4","auto-changelog":"^2.2.1","core-js":"^2.6.12","eslint":"^7.20.0","get-own-property-symbols":"^0.9.5","nyc":"^10.3.2","safe-publish-latest":"^1.1.4","tape":"^5.2.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"greenkeeper":{"ignore":["core-js"]},"gitHead":"32b16a3809db3bbb463df501c3984a333f1979f3","bugs":{"url":"https://github.com/inspect-js/has-symbols/issues"},"homepage":"https://github.com/inspect-js/has-symbols#readme","_id":"has-symbols@1.0.2","_nodeVersion":"15.10.0","_npmVersion":"7.5.6","dist":{"integrity":"sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==","shasum":"165d3070c00309752a1236a479331e3ac56f1423","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.2.tgz","fileCount":14,"unpackedSize":18056,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.13\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJgOnPHCRA9TVsSAnZWagAA+tEP/296p4+KYc4qJLrX0uxY\nDA8r08WOCWwnIEHw6X1O12XSFB4d0bMTA1vIZEAl/GosbM4wvVdO4JWYaxL+\nAe8m2GxNjz3KXswq4SELhwf8c6xk3Q294qHiUeByfvxK4OwfyKyZ7iI4YnV/\n4jT/FE+AOlNqdAO3izGfm94UW8X1g/6S2X82JTxKngl1/YoAOraEjtD+XF2f\nPTcL4SmyoL7xTTMtPEOSXRAB73Y2KeNbFC8Ee1r/vU0C62MscsFD6Whc3lH4\naxT1ccSUO8YUftLzdMJY3R6jqF8ZKAx7rYdPPDEkm4fJ/MAsw9pQKKf71Dnf\nDAYBAGsFByUVGraRgWXx01w6NnVNbHqBBbLlTK1e0JpCvrSpkFpX/kfERpEB\nhaFUc8n7SDIeAFgBidagI5HYHwbUmlPkZ46NXZcL0xLMx8SNXyRvImsXOR5m\nnV6ReVLSJP/VmTFYjOVuFsteXU2Ot7ZtnHy/eOP8WXYuWn02CuqFC+i8xFxn\nEXCqT3wC1ObdFkV9E2WGVQpB6U2UoJCiRQJO0KjP+arqcS22zL+IXDgGxr/a\nw8f4erWtCTSJquDY9+P7VCSbjGTxfmkunAXUcwMCEiVfLc8wwrg+vHJF3Sgi\n2QqXRdNu1JKHGXfnpnJm1rj2oZV9/5ZjYQFB+CeWM70TyKrYeTgysG0pa8y5\nc8++\r\n=JCcn\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCICmk/GnP0bgLMLKwOsqmi85pgPZF8i7IN0E1zAWz8kQIAiEA7agW7tNwD7vJ3WADcThU+35O/hHKm2DzykwgXewNWdU="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.2_1614443462507_0.38046500905605685"},"_hasShrinkwrap":false},"1.0.3":{"name":"has-symbols","version":"1.0.3","description":"Determine if the JS environment has Symbol support. 
Supports spec, or shams.","main":"index.js","scripts":{"prepublishOnly":"safe-publish-latest","prepublish":"not-in-publish || npm run prepublishOnly","pretest":"npm run --silent lint","test":"npm run tests-only","posttest":"aud --production","tests-only":"npm run test:stock && npm run test:staging && npm run test:shams","test:stock":"nyc node test","test:staging":"nyc node --harmony --es-staging test","test:shams":"npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs","test:shams:corejs":"nyc node test/shams/core-js.js","test:shams:getownpropertysymbols":"nyc node test/shams/get-own-property-symbols.js","lint":"eslint --ext=js,mjs .","version":"auto-changelog && git add CHANGELOG.md","postversion":"auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""},"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan 
Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"funding":{"url":"https://github.com/sponsors/ljharb"},"license":"MIT","bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"homepage":"https://github.com/ljharb/has-symbols#readme","devDependencies":{"@ljharb/eslint-config":"^20.2.3","aud":"^2.0.0","auto-changelog":"^2.4.0","core-js":"^2.6.12","eslint":"=8.8.0","get-own-property-symbols":"^0.9.5","nyc":"^10.3.2","safe-publish-latest":"^2.0.0","tape":"^5.5.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"auto-changelog":{"output":"CHANGELOG.md","template":"keepachangelog","unreleased":false,"commitLimit":false,"backfillLimit":false,"hideCredit":true},"greenkeeper":{"ignore":["core-js"]},"gitHead":"444dc14d035df9891743a28cbc5d6ecdb0cb3b01","_id":"has-symbols@1.0.3","_nodeVersion":"17.6.0","_npmVersion":"8.5.2","dist":{"integrity":"sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==","shasum":"bb7b2c4349251dce87b125f7bdf874aa7c8b39f8","tarball":"http://localhost:4545/npm/registry/has-symbols/has-symbols-1.0.3.tgz","fileCount":13,"unpackedSize":20603,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJiHo7dACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmoJTg//VKZyTT/GxVMPZNQFC6Q05AQ+zwFmm1ePSsyP3+hebhjz0KMZ\r\nZh8Z3oaFj53lk6p6hl6wJgJh8v+4H8tYi90zewuk2/sv/r4gS8KKOJkEU5hS\r\nExpiO/FlpW1EBW0kHcPOLiYkyvhm5iNX17o0qUXw62EVu9pFdzLuMLtoVch9\r\n0RC3armyFU5YXjpr4lQCbHCAK6okYFFh6BGQYB0k/to/o1YZ3QijFZ7cDlyl\r\nUSH33b6VFsD9gVT6pVYGmhwPfbxrUzvgpmMeJqdL940V3BgVDu9h/lXFDpvC\r\nyf9vmUEiVkcxeiIbJuusCQjMbPT31uYDaAYY+W+v4pbD552jb/7Gm2ttl1uV\r\n1yx9J3M5aKbjZWMVfRinlfGoyUIs0rpxhSsQTp84skwPLkXC1YfODYNhy4+o\r\nVR5GNTIDDOB4i4y7lGVvx7Vd4ySP+Tz9YpmFI9ZrCnEVXggUn9y+PU8R19UJ\r\nrOVAYikVzsyC5PT9PKr2lvITXDb8siGUNt8YmJhZupzv3K+I5sEojmpqCGvP\r\nW748lmzXQAFYUY/BL1/zChahtp6w5mBaX79uF/xO7h/owukFCK2Y1Seyz4HP\r\nFzn6kDQM+TcUD9GlOhy1OsSLVhuK+gbGupNtSG52OaR9JVtmxSzd83TujaTF\r\ncDmeevwmNQi4Gnt70AILlnaCxXanGrp0epk=\r\n=B5Gh\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDwzczMy98ycY151XrPoURp8chFzfXRYegRhpOydLT8UgIgb/6c33xTl81h3biIUwEWPJAVPlOf6E2AEaWCvLmPOck="}]},"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has-symbols_1.0.3_1646169820978_0.6668045837242529"},"_hasShrinkwrap":false}},"readme":"# has-symbols [![Version Badge][2]][1]\n\n[![github actions][actions-image]][actions-url]\n[![coverage][codecov-image]][codecov-url]\n[![dependency status][5]][6]\n[![dev dependency status][7]][8]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][11]][1]\n\nDetermine if the JS environment has Symbol support. Supports spec, or shams.\n\n## Example\n\n```js\nvar hasSymbols = require('has-symbols');\n\nhasSymbols() === true; // if the environment has native Symbol support. 
Not polyfillable, not forgeable.\n\nvar hasSymbolsKinda = require('has-symbols/shams');\nhasSymbolsKinda() === true; // if the environment has a Symbol sham that mostly follows the spec.\n```\n\n## Supported Symbol shams\n - get-own-property-symbols [npm](https://www.npmjs.com/package/get-own-property-symbols) | [github](https://github.com/WebReflection/get-own-property-symbols)\n - core-js [npm](https://www.npmjs.com/package/core-js) | [github](https://github.com/zloirock/core-js)\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[1]: https://npmjs.org/package/has-symbols\n[2]: https://versionbadg.es/inspect-js/has-symbols.svg\n[5]: https://david-dm.org/inspect-js/has-symbols.svg\n[6]: https://david-dm.org/inspect-js/has-symbols\n[7]: https://david-dm.org/inspect-js/has-symbols/dev-status.svg\n[8]: https://david-dm.org/inspect-js/has-symbols#info=devDependencies\n[11]: https://nodei.co/npm/has-symbols.png?downloads=true&stars=true\n[license-image]: https://img.shields.io/npm/l/has-symbols.svg\n[license-url]: LICENSE\n[downloads-image]: https://img.shields.io/npm/dm/has-symbols.svg\n[downloads-url]: https://npm-stat.com/charts.html?package=has-symbols\n[codecov-image]: https://codecov.io/gh/inspect-js/has-symbols/branch/main/graphs/badge.svg\n[codecov-url]: https://app.codecov.io/gh/inspect-js/has-symbols/\n[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-symbols\n[actions-url]: 
https://github.com/inspect-js/has-symbols/actions\n","maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"time":{"modified":"2022-06-18T19:26:17.135Z","created":"2016-09-19T23:46:36.740Z","1.0.0":"2016-09-19T23:46:36.740Z","1.0.1":"2019-11-17T00:01:00.460Z","1.0.2":"2021-02-27T16:31:02.668Z","1.0.3":"2022-03-01T21:23:41.133Z"},"homepage":"https://github.com/ljharb/has-symbols#readme","keywords":["Symbol","symbols","typeof","sham","polyfill","native","core-js","ES6"],"repository":{"type":"git","url":"git://github.com/inspect-js/has-symbols.git"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"bugs":{"url":"https://github.com/ljharb/has-symbols/issues"},"license":"MIT","readmeFilename":"README.md"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/has/has-1.0.3.tgz b/cli/tests/testdata/npm/registry/has/has-1.0.3.tgz new file mode 100644 index 0000000000000000000000000000000000000000..90c33297e278b4dc275fa886d433a2b2af655d38 GIT binary patch literal 1553 zcmV+s2JZPEiwFP!000006YW@SZ`(Ey?q~gqgZm)~ph$LJ*A6YJ*mAASkqlXB77QCI zOSH{Q7B!Me8Wj2OJ5pAnq)F3aX}V%&i^$}?Ja;GF9jPKt{)%VBE~=+>Dp>yPkw?4T z-rL!MyVh}Z_V>2o+4k;UyVKd*Ya<{2*xlX5=VKdk*C>UIIhyis^;FKl)$<1#9}R$3 z9xq7iCA8+TXqjpuB_iRpkd)UHR_AO%3fyKIOv*yMwCx#{^KxP(Y+>V@;AI-yS9ZjuM6JlJ%xoAM z&1pPikP>jTgyu#%S;RC`46=w*n$6>%cHYd?no{3Fo=aqGB`lYmPD;tRK<3}IY=zLk z_=WK_&S4PqNt~xkgeFFII!9VV{}oHgv)V&+u;N1wjA~1>T;kt9w1b=!Oi(oB;@U|S zVI_`%sr_0i#FcvkW-0cihtQgoGjXl4VbA>$x2X_iWnrZwqY24{Hs#QZTFtdP!d6>d zvwj8eBx4hM5erGUJ@mS+KXx~jnp?5LIP3jn*@7qaJ}pR|l02cL;qJ7|6SZ|WClp^C zO8nWe@ZYs(DLL!iDfy;)S2htD&E+O;lqsE68n&!0R8j9UP1l4StMlTTAmb_uxctIu zUn7!yM)M|8j=MoIzJ3L5Yp2OtWnP1|hN?ips<8@ABoUgF7YpEJ4w|(#Wl2*efu1M` z;)2uJT*7*?0bA|PHry_*hA|?1 zfz<(13aEfN;bd|FGal!Xq$W%`A;6}PVCu|>36epaUjViSJWCjCA~A1iK7$w%1XG-H zj%oy(%JY~LWKJO#f+aLYeUP%GT##JGDusFC|(V8YCrg1~dmnt4VN< 
zC5DyK;JD#A)n&j$rX(v{i5K3t&h-oN0O<1s$PSulWw~BI-Q870m zrJ`ydxG2=3E{Ue{!)6==$ugw@3rpqewRM_Dp%f~hav36xdycJ9@fug2V2GwA&k>Sn znUrD3G@@$8YDKA@vMgig=o&)iDODbcmxd7`DW0%1qGMjY;G9V`wK74yfa3bCE=gjp z8u^46RlpDz!>g8RcesMbgvlkv*caIF72a((EyN$VFb?|Bu@k!BjbRi9@4TMdgO)SK zdCP=jFB$|#5g{yCU|~#c+~U!w{U=R zeh|UX!*Yd6qW~1(QkCb94ODjMhTQ>9oda*^MJFcoy~tNO`>51`krPH<_h{&ZFgglH z!PrIkp5X_+=l4TIaSvTTvJe%azjtBf(0e*fqfky)l|<^uqwvt8k)3pp~GU?^aq=sNu#NK^8SX8mpg1WlPU>hNx?-gqzHk z@o*JcjURQ?{XcY_-l1zP(rNk(MEjlaR28`-4`u(T9`u_7IV+{|yP#u-mF!5AQs#|k2DQ{e-b$a5-xXg$}KByC*N?=9N6=J== zWY#nDp{jz)zIOor=cCSl^?~496@Ulwf4B4L_uu{Y_A39MV%(AcYAZDJUwuGQX@5t* zwk%s01SL{+ZxiZrftU6T{RvB2$R20w_cnF2>gY&e)!jCul5o6WrVKBgcsE5@JV>zf z9Ezoo{#M;k(o$yJg!+QBfvFD$NVv_b{rob#JYC11m#a#)8mqAyKic>Qp8fO^03HAU D#c~j$ literal 0 HcmV?d00001 diff --git a/cli/tests/testdata/npm/registry/has/registry.json b/cli/tests/testdata/npm/registry/has/registry.json new file mode 100644 index 0000000000..137122e4a7 --- /dev/null +++ b/cli/tests/testdata/npm/registry/has/registry.json @@ -0,0 +1 @@ +{"_id":"has","_rev":"28-a7978fcc3beffd13dd518b65fdb20142","name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","dist-tags":{"latest":"1.0.3"},"versions":{"0.0.1":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"0.0.1","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","devDependencies":{"chai":"~1.7.2","grunt":"~0.4.1","grunt-contrib-watch":"~0.5.3","grunt-mocha-debug":"~0.0.6","grunt-exec-jshint":"~0.0.0","grunt-release":"~0.5.1","grunt-newer":"~0.5.4"},"engines":{"node":">= 
0.8.0"},"_id":"has@0.0.1","dist":{"shasum":"66639c14eaf559f139da2be0e438910ef3fd5b1b","tarball":"http://localhost:4545/npm/registry/has/has-0.0.1.tgz","integrity":"sha512-Ulo9uG05SN7r55LqJxpU84yWzVPfJGv+GZSaEnm5mKO/jtwV5KODce9bPEDJh1uoYGJpsy5pKi4dQOdDSFzCvw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDfgu84CAfHIBVCPe26sam0TSBJ85TxQTylJ60gIU80NgIgf47PIcgnILXdcQJnGDT+j5EpCT2kN392mwWhJQz21gg="}]},"_from":".","_npmVersion":"1.3.8","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{}},"1.0.0":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.0","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","devDependencies":{"chai":"~1.7.2","mocha":"^1.21.4"},"engines":{"node":">= 
0.8.0"},"scripts":{"test":"node_modules/mocha/bin/mocha"},"gitHead":"3113c5ff93ec8befffd9cf23c4dbab7a9d429c20","_id":"has@1.0.0","_shasum":"56c6582d23b40f3a5458f68ba79bc6c4bef203b3","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"dist":{"shasum":"56c6582d23b40f3a5458f68ba79bc6c4bef203b3","tarball":"http://localhost:4545/npm/registry/has/has-1.0.0.tgz","integrity":"sha512-pZW9uw/9635RZCMUO1nIiZ8Ue8fJP6GlegyXWsFmqp1asx44TMS+K+ffoKnhdFt/piqIpvHG1h6qXmyVEiXCfg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIFCoV79Swa7ogDk0FPLZ8kwt4fygO2aUdChkivfxg5juAiAjZm1GE4R6dpMqLcAafe1QcBaMYvhuK5jkjQfoULHq/A=="}]},"directories":{}},"1.0.1":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.1","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src/index","dependencies":{"function-bind":"^1.0.2"},"devDependencies":{"chai":"~1.7.2","mocha":"^1.21.4"},"engines":{"node":">= 
0.8.0"},"scripts":{"test":"node_modules/mocha/bin/mocha"},"gitHead":"535c5c8ed1dc255c9e223829e702548dd514d2a5","_id":"has@1.0.1","_shasum":"8461733f538b0837c9361e39a9ab9e9704dc2f28","_from":".","_npmVersion":"2.11.0","_nodeVersion":"2.2.1","_npmUser":{"name":"tarruda","email":"tpadilha84@gmail.com"},"dist":{"shasum":"8461733f538b0837c9361e39a9ab9e9704dc2f28","tarball":"http://localhost:4545/npm/registry/has/has-1.0.1.tgz","integrity":"sha512-8wpov6mGFPJ/SYWGQIFo6t0yuNWoO9MkSq3flX8LhiGmbIUhDETp9knPMcIm0Xig1ybWsw6gq2w0gCz1JHD+Qw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIGwsArRhXrhj+qjKhTjYer8IcOloz5NSf90mKsSbjDTCAiEAikyzkXCsz1Xr2d+L0/QJVYD+vLNZUGu/gQpngIRstPM="}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{}},"1.0.2":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.2","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"license":"MIT","licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src","dependencies":{"function-bind":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","eslint":"^4.19.1","tape":"^4.9.0"},"engines":{"node":">= 0.4.0"},"scripts":{"lint":"eslint .","pretest":"npm run lint","test":"tape 
test"},"gitHead":"5becaf997373b548e790e8c5ec0b718e20da6097","_id":"has@1.0.2","_npmVersion":"6.1.0","_nodeVersion":"10.3.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-D5/WxwX+SrGfs/fiQn34RAoIZkCLJBDEfBWS1kmTI6G/1mtjhxTBiIiJi8EsKhwaQqKqj7lpKOi3i69tg3P+OQ==","shasum":"1a64bfe4b52e67fb87b9822503d97c019fb6ba42","tarball":"http://localhost:4545/npm/registry/has/has-1.0.2.tgz","fileCount":6,"unpackedSize":2854,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbEwZkCRA9TVsSAnZWagAALQAP+wSiRGrAWvCQnVLAKjxZ\nYQebEuC2gpCQX1p/eG0RGxdRC+rC1+LOG2CJR41wtMtXXIcnC6wrXb2TnHMw\nyHMqSyQIjRxVAmR2CPOlh36FXAp9efcgPKyTAfXEw0/3iZMjJ0jcZCbaDckQ\ngLFB0fkv1T0uzudvjUqXVbSXihMe17qi3/vXQbQNtnrFkHfGLstp+cdXN33+\nh0Hwv6FsE+tdfRM2q2xhYzIvDbDQ3mGCe0nsMYj3nRFoSOsAnftsOrZnIGr0\n4VuFxe66oYGaokH/GI/JR9AikEj9iEizknW4TB3d9KDSHtfVbsHeptDQ/CdA\nXUpyKlALK0VZvHGC+lKQsllCb4D1uJ6isnoqL3rV20/v1X1tjKIm9/P1tBsB\nKGS7mMlR+vSFzB8iGNhYVvE5p7Du5FXJElGI8qj0AjCaCbvech30WbHyIJlx\n26/ywIE5/m+HJ+wnFAUDW3VQWakzZDPqhyc0GVo+yCixxjLGv++UslsdSncn\nx9g0k5l5bGQ2SCn8XbZRCG/jHm+bdD0NOjqvaE7bu7RvGPfniqZDpH6TzJmI\nlb55w8iPpR+GZ5vhypDdpQBT//ty33rNHb+Q9U4e9ZHnQNnwK15WaImfE7J5\nxfmA+8JRj3FBiMGpKU++NHOtI9Y+aK+CqCmVwlhlkpZT0z6oQphcsl2gqe4F\nuujs\r\n=q25B\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIFVCO11i1G9/dgjkto0mkv30OIZOjyqKhV8oPKebk69aAiAipxVf8Vs0ptvMfLEdGwLLLc7iQxf6hreeLjoOsrhRgg=="}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has_1.0.2_1527973476308_0.8578208238940064"},"_hasShrinkwrap":false},"1.0.3":{"name":"has","description":"Object.prototype.hasOwnProperty.call shortcut","version":"1.0.3","homepage":"https://github.com/tarruda/has","author":{"name":"Thiago de 
Arruda","email":"tpadilha84@gmail.com"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"bugs":{"url":"https://github.com/tarruda/has/issues"},"license":"MIT","licenses":[{"type":"MIT","url":"https://github.com/tarruda/has/blob/master/LICENSE-MIT"}],"main":"./src","dependencies":{"function-bind":"^1.1.1"},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","eslint":"^4.19.1","tape":"^4.9.0"},"engines":{"node":">= 0.4.0"},"scripts":{"lint":"eslint .","pretest":"npm run lint","test":"tape test"},"gitHead":"4edf96f2dec87ad6b6e68482e8f6d7c8eb7e07e6","_id":"has@1.0.3","_npmVersion":"6.1.0","_nodeVersion":"10.3.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==","shasum":"722d7cbfc1f6aa8241f16dd814e011e1f41e8796","tarball":"http://localhost:4545/npm/registry/has/has-1.0.3.tgz","fileCount":5,"unpackedSize":2770,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbFWeCCRA9TVsSAnZWagAAGqoP/3/GpNmBbr2IzYG0v9Rg\n0jDcyA0p95I8Tc1GbOsB0YWMY+VMs5I3tggHG4yjB8OskeR7GyItfcZpe9b5\nEGNr2Yq/eOKI602MTnWzHaAczxF887EVcXDISg1qDlzjTYWFMNms5jDxH6OT\nKD0SLHE1qRCASPxNZsJLoxT/dPaVfRC5QMIz8msEaI+qUu8p54cO0/DSLSlT\n5kCGKA5CfbfIODAmyvddsKgOW0at16XJ97f+qHhrI5q6HoYdM2jcLzPXxPiw\nSgPKXICus5TjFvRdBoOXa2diz9urPjQGzVsGoKs8W0Z9EeoxpiJInVRN1Hec\nabW1NzLvINbuFcG1I/CvNVXKGKwIafKg25ba/T/PHa/5rHY2+S4Y9Mj0SLFv\n8V7HWSQMGaG86kZjH9vgd7MPP39lBZXw5msjftiRVxiDiZYutbGzBymwHcYd\nzPVrzCfdNg9o2OsG8mjIBGntCwz9/Yrx7npK9mP97nZQ4EDDoDoCga5efkMP\nmT02Vru1cCdHOfRnM/tlr4Onf0umRhgzUfsbjdSsrGapGCTeVvBvnm6XKznZ\nw9HWfGyNaZiT1J0/pmnRMwxEp/xcKMaAOa1c2pisTGpjkbQloGQx77IwfBiO\nAVBfV3yBDWCgutiwEb4zg0RPPjlS1RCJDMzg+CsXT1u9H6St8MUtuBikkNr4\ndlxV\r\n=Dlp+\r\n-----END PGP 
SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDi2aj8+sqrebhyEkv7F23LOzXMTe0zUrFZqtxhj4ubhQIhAM8njQhxRA7zu5nYQHE3+EN4rJa5+9EpwlJiBeEkLn54"}]},"maintainers":[{"name":"tarruda","email":"tpadilha84@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/has_1.0.3_1528129409940_0.08921093934264301"},"_hasShrinkwrap":false}},"readme":"# has\n\n> Object.prototype.hasOwnProperty.call shortcut\n\n## Installation\n\n```sh\nnpm install --save has\n```\n\n## Usage\n\n```js\nvar has = require('has');\n\nhas({}, 'hasOwnProperty'); // false\nhas(Object.prototype, 'hasOwnProperty'); // true\n```\n","maintainers":[{"email":"tpadilha84@gmail.com","name":"tarruda"},{"email":"ljharb@gmail.com","name":"ljharb"}],"time":{"modified":"2022-11-08T10:38:33.269Z","created":"2013-10-08T00:54:01.609Z","0.0.1":"2013-10-08T00:54:07.309Z","1.0.0":"2014-10-07T18:41:58.615Z","1.0.1":"2015-07-24T08:49:30.633Z","1.0.2":"2018-06-02T21:04:36.393Z","1.0.3":"2018-06-04T16:23:29.998Z"},"author":{"name":"Thiago de Arruda","email":"tpadilha84@gmail.com"},"repository":{"type":"git","url":"git://github.com/tarruda/has.git"},"users":{"getify":true,"bradleymeck":true,"akiva":true,"rsp":true,"nickeltobias":true,"tobiasnickel":true,"ahmed-dinar":true,"maximusx":true,"tjfwalker":true,"iori20091101":true},"homepage":"https://github.com/tarruda/has","bugs":{"url":"https://github.com/tarruda/has/issues"},"readmeFilename":"README.md","contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"}],"license":"MIT"} \ No newline at end of file diff --git a/cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz b/cli/tests/testdata/npm/registry/object-keys/object-keys-1.1.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..3e52f10dfe268a2f01ff9c9cdad2968d5011d7be GIT binary patch literal 7677 
zcmaKwWm6jr*rjpT;-x_GA`KLZyA^i`ZVm2k#odETDK5n=xH}Ygf=h6h7KiLJyEFS? zXZAf`t}}D}f%`r*u^0&dJ4ELqb~7S~?|SWgBOI&sArS12)Yc|?E0APMC=EccnBhKu!WXOi4DN!2 z6zs1+uU63V^b%szF7}DX0&}+^0ykELf_P=F?`3Xv_8Y8`n|Sg0;;ixr7f0_4dntyK zXu@Xx68qB5{iqiWyF=^@^F~yhZohqTCiE3A;lX`5*<=QOMScHcspK@SG*(Tx;#J`0 zb=1|!d$)9?h$mlg8KDe+*LUB91IblV{q3WOQ}uSj>wJu4fC!{hhLec-$;VZu2@?ml zLNSHLX-&+3E2l6RJkrj*h{K zfy2)4B{R{*h2veQEiI_i`^&kpcS_~G7`%E#F2cWt-RB^ZAkIq1z*b&J*~v(-=%g6) zZ-Zoo3-8)M7W0mTg`aowGRDzT*S6@q8X_FMfe5cG5I+F(E%|&DTPm97`&+Z`%GFQzgOEzap zi3jlVr8J*ZZqn%8MyJ^V6m+Ocn>4 zV*=h(2iAG)Vpu*=200dD(1dzxER)dj;{Kw9L9VSjo=X%f>{s@9Pks}eIK{!<9%|>xc+z)!&V6rL-kzy-5R%Z>tkenI z2KfDbEc1P?3U4{v-2{NT10tW0UBFW#307@~)IEr>ev98CuofuzL15MGQ^?WG7Pn*~ zYk*@sJ@qex+{=y07k*)ykxh)ENY3MmmLk(>FT^ipA0h~`C;Q#lu0YeG%&)_VoaK=p zT|B8#33P*Ig?h3C^C3_R!XQ`5i<&fo=#=GQ6m710O z6_KBM1`dQY7sD6F?o!?Y!PvKAjfmUjYC17!f(>gJjUjYs6u0(jJ_H@*KSH7Q7dHmX z{fQTiK=Y=jtSF7MchUnX)~w@!R+QW^^OZ8K5XYSwb+bGfkat8rcb#Q{y^-p;rea+X zt$`lW4>ieui&uo{KP7h`8Ob$FX_-c8>x_8HT{`W?QGXVdL)3_Z4)K4f51wmyAIhK* z5!!b~Us76E!-A)AM2E{QCKZztO=yTstJ-T*7=|5r6Q%_ z!D7oQ)|->-P6;sM*II4cMT^(k7JbzxxCmhRhUp7NFI&X*$)kn!!wy?$8mB;l6RsWx zqgtbS^-m$n8`6g4(#|cON^PHxlypZadk!Z9%O*xA+UN2=2BUg=IWUum61!15u4x`f zmXAu3j>g9{Vnx)`$PaE6Zdp56?bwav_1DW==v|>Eg0HtJ7wXS18J@LU$GLu+y^Aq1 zkFK#2yD%wB|C^oHN3;7q<->EIz?stzcvH`>F#i}*=q5F+fQJ>MZqp-KAw#-QEd-^_ z)I3QDbGC4dD9xh@pyT7zlU>KDH4RmArkRX6kKq z_S3Hw;q|=3iqKV=jnudy7>hLiF>`^hkjIm}=No0{YKBozw_d+SQ+3XwqqF2La)#7} z?zC3fUJ;(mW3+6hv}|KO!rA<)L&qC7bES{-;m4(BlrW^1asB%-6pMw`1Tl;?cQONZ2%`y{*G-KitXYS~S@b!9Ps(dN{f0 zX!E_?JIRq%W@}q$Oji7V;j`e*mr;CiK)A3W?Ao3!tK@YnckD<_yD>>$)ZruT&Ib^)Vg@=$Rec z=hJU{`NjqYw%k~I{7>M{aOZQA_u>Y-^WDn*kDv!9e!anv|No(^MMWS==c6l*MDVCc zqJHkE*0y-^!|HnE^{0&mdoXuMPuC+Oci)--&ea=x4Gv|y7S0EH%taeA1kui{mO1UK3H!3wjN`e z0)lEGppfZDa$Bn4YiCvU9qP?lS`Uc75EG*R; z79CnPq6!*&$92&pP&uNhFsMQIA7-YG%+Tw{CL!ub;dZFxujHfHRA~Z^D>-r5uF|8j zBloY=G9yE&FxpgXOCMgk`l93Tj~9r=i=T1zM?g8eb|gfbyT&WMkacWcNKH_ZURF#{ z8sB}i#&}f{bUOrVL!2~&gl4^o(Mu~W#{BFRk%#pV=3#Wl4(!; 
ziY-hZmamR*%`ry$LipTu?YQ4o8Y|<^fORpJZ&GBFg!p)-MP-N*y&<~hH$gbkFf+jA z4%MHeWpNrTYvwc$XS8qP7y@b<{*&#U?dFs>53Px`9Bi{2%4Mla6~+~AJd4H*NuY}w zi)&T{eyM_0!N=FM=`J4*ETXtf_)ZxjHBjH@Bb}D==4dbuRsIG>w`A@*@C=z!QFCUY zYwSUxZ6&nYEe0@!E@P*gjhKxO=TMR#zxD0(J33&icPbuUjY8{GE9tnOy2Iw1&Y7IY zV=~WgXV|3U?tBJP$eOymfe|iY1gJ%qk{UrhczQpnH0hg}wbUMs_a1PRL@s>8mgem7 zFN4;utz!zyeQe5=HTpgz)U>azk11(cp)D@&7PH>Mtf{dTA>UAgrj+gxJ#HJbY6`v- zvDnyXYP4;!`k{nc`6zvOFt52Z2}VFA9et1WVWIJP5{X=2a0E(avMv68CAjg~qZE&q z4jjCtiH+vzLV&WAa~$@U&&~>wZ;WB$r%Wyaz7TO1YE6<9CsN?&FSL@6pBqAY?Ocw) zIh{7#vqX=0%95Hmo#ptQ0hhf=NU`xocKmyNgIKfezdt#dzHlNf)95+U45ElF<;%xo zYLY$f5-+`p&S%T>5d&103N4q!xyiJzi?oKS)(ArF7((sRZBNmdBNl&4WmaLD(Z-{} z;;U*0w@-)hM@wUD*)j*+2c-x~_7o#n=7JK1$msKdG-RD2Tp!9=lIUH+=4TQ_(&@Im z%C$c1rWI8&7gg2ZgMa)=uyE>({1jwl{X318^U7=&DG(zI4Z=ivQCs>2B@>|Gz zJ}E}d>DF>d>+RR4I+VNUGK+`T>hw2{eKoqN>VK$l-oT>RTv8V1G zVLUHw%+sV-RqOSB;cA>YX$A!9U2E2w2}PWoXaOmyI`ixNnr0b}{WKvp-^OUIcv!Dr zQ9JHfPP@_M^aa(8ZX0j##$}tajR2=b&I%wNrKUb_JKcXg?k5dHn+6qc7Ix2nFbs6IV$G4@#ei~Bc%xCg z6Z|rpC?`vBwjC#dqjYX^ixHnoM~?D&*Zuc2DpIlumfBeH%SY{TFgNEDs}#^#T?;Z7 z-Fwx6U#?^&ADNF*L{YLT>BAKwkt9yH4-Gm9kFd6!ou(a4wz$;%yMUUVZQFj@Ke5Dv zX;XvKSQ3VziFb$48DD-Y3HB8T>-)~rKWIC%K2J88c06%ICZUzOv93?|MN0jy}rR>&`V47Q_bj-mNO0McAmd@CXb?R_c;c_C?=F z3fC0#bRY~S_>RrZh1gNCWKE4oxvCKV3njfvpHVDAvP@DR5pQs8GNji`1*t)}sh>V8 zxhLWs3HGtG^y-~svhS^Z#RahnshrAVS$7)7`tSIxu`^FD1fvQZ|LMT%w|E-uKT>&LrW1;05- zEN?(GK5g%y{I>;nJjS_>tSq zEPw}a&UhLmJsm<~sL!10uH(^Oi9ajyv1k%c$nu+RVz)LD{c@D}KXt_3=E&t9JvJ##b_Vzw_h@wVha5e<(MKkY*LEC2|`8%q|T3;pCLDeUW{ zyA+oRc7uX~(YfE*%nqVlOaMyHn2Nx^TM|#&Z?bUm42=R;2?n#JJ=Y#*DM9LM8yuzd z1?f_}f^IJQWFnua=$N0}E_O^0Hx@{fngb6~ui@(1V1wp!~$LG;V`ED7ZS1$H$|HG00lM|s4B zQg=NBtKJvYm5cmMxYbs|&x{?uK^ORapnlWp+`X7>_3-?CPxrWXudv=HH?wI@=Ra7C z?f7#YR^r2*kA~Q5%~$hc`&_nN+@8IFIGSfnRTzb2#8u3BB|qmMI;Bq+o8C0$QxN9W zZvkRhz@}0xHLaC|ITIAhtF77C^YmDNORg3V}f@rf7^5}mv z;LOdDZ#W{+@XlQFm)kNdR%FW-#iaa~pX#0W`1Fj2aHqtc7JWh!V9I73`Y>|_>1X3b zg2D%1KzTuM4;QPjCk#?8TENz#;zDIWb=2lCKyLjH4u8%}LcHy?{8r&~Q9;=#x!2!#JC`Ra*>O&zDhADZaQpzCWGc5tx0k 
zzD_$65p58h9k%D^lD_GiiZxVn6FcV{ed;+fGS@NCWzSV9m~{IXHi4CDDL1H4sDbhs zHCk8p^=!p=Xk95k-=Xi6yXY0qK7~0yRgkVnn~nYv?ix9CkT=v)nrESlPSn!Y^e=z^ zC{FbS!_vjIWh}yPUwD@+E4MaY#DsfI0`{s`lyv?2g|yFK5f_A zYbtr!%IBzge{C$?wG1B}eQ#uZm;7ccPt{W$eIboe+%M&?!icc=SLrktpna)0UHw$* z5GGZKmaJcVj%Z$}8H06$WOJlCohT#O$bYHs)4X}K zOnDZWzqIh(3IT_navJ>7PJSdWhw#sV67%5UT!>Zi}N(Y!4G zv4&Fj=yk`SNbT?qgpXq8$!JQ-TH;rq>`IPu^Ten=yy5hmRm$=yuAl&ZnS1rIh?!v; z+g|ad=+yNWE0C&5)y+TX_?R^A$=mG{%-lHwW2=}tNgu-&Qx#VKmRULwv^mnEW@pUE zrdyKN?gEJRqZ_^$?@BbXXx~EQchjP;+`SqaVo3!xLSzwL-t(BFr}>iGGxZ%Se_U5x zGc|nO+AesymkIRjazzsrKBW55o6)vhgE1?i%A-13);&jrw<3~V=H{CvohwpGlp>azq zYLs`R@My2fO@Y6{eA#oq>2dn)^Sk{8-ss>YmM7SO$uZpJu+D$Z2~Nv@u?*L@=x88@ zFY3oLzrv}>98rC?DC$7hx3=~~)Xg}snY=C3R_h5~{Fb98!1x<9*UWFbZvs=Q;1)-f z8RrtpP}do?Cc}%zKa#Q9s^iu_6fAX#*CHG)YI)h(W^`eQs)g2AJjq=kmf&a&;dIo) zlM39Mxh7eo89H83_Klq6wQ(~yxr0LK7%mqJ z`*@1=lN^2)!y|7z zqWXmsV6nk>w!NowH$C-7J*f%)uVDG&Ua!TlX~w57tyel)^^z+x}^wD2wK#f$)W^_;!%>)W_j zo3Zs;CREi-7`}FVJ8!a=Te;k-dU>{xJ+0tM31sQa{gsnbLGu)ce1Gy8WO=w8s% zkTS4LCBmxvEVGyt572@lXMW;)K7E|0H~)K7Y_%*%*YICA9+2&L7u?o9;k zLjLOU2;VpfFSpo-l{=;k+hFpH+_fgTWO79~D!Mr(eO>@w%pju+$o62^C$3!)>ns!< z)n8?|H!7xjblSmV=DSs|=Sv8Fh#Tp-!yz&weUj;@FV ze0PdS>-a|OshXsHm2(KEhKAegU$JB zs>IS(E`^6mZzTy#gyOQ5@uGllTRTh|VCFT=*j;*#o0c?y^aNN;TzbeF%@l{?`I>b_ z2mLcZY961-V!x^>nmfG;a)j~PKx41_PikyteP+@tkzEJV(x#*cfkMH^@z~YQaI9{^4EkiccXU7{97`Ss-9A8Bw7mPTfHQ8>`jm8Hu z&l&!2du^C|J8?0oBX57J6NdChq01NeP;~P<^4gXhOw|;y^($l+8ozxZ?JQ+F?4G1L z3PS4PKj~pk$P^{LM@DnlUD9>FihdA+VCqv}tQH(&9nbc%G3oThmB?+2mfhKh zms##c6!YOx4aGo16zSEiRf@8ny~&-QP{O=TUwaxh4BuW4$24Ey rMm6F8;6FAGZMChD>dvN_-j03Sb8CK|9zUI&!V&mkn2HE-2nhcRU?Cip literal 0 HcmV?d00001 diff --git a/cli/tests/testdata/npm/registry/object-keys/registry.json b/cli/tests/testdata/npm/registry/object-keys/registry.json new file mode 100644 index 0000000000..bbfedec12b --- /dev/null +++ b/cli/tests/testdata/npm/registry/object-keys/registry.json @@ -0,0 +1 @@ 
+{"_id":"object-keys","_rev":"104-fff9f09b12add81f4389e3e50a2ff098","name":"object-keys","description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/es-shims/es5-shim","dist-tags":{"latest":"1.1.1"},"versions":{"0.0.1":{"name":"object-keys","version":"0.0.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/objectkeys.git"},"keywords":["Object.keys","keys","ES5","shim"],"devDependencies":{"tap":"~0.4.1"},"testling":{"files":"test/index.js","browsers":["ie/6..latest","firefox/3..latest","firefox/nightly","chrome/4..latest","chrome/canary","opera/10..latest","opera/next","safari/5..latest","ipad/6..latest","iphone/6..latest"]},"_id":"object-keys@0.0.1","dist":{"shasum":"ab917307b1042981453e094c41049246e99602d6","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.0.1.tgz","integrity":"sha512-/aM4V/OT388JOkoQJ57Gxeg43O8qI89rybO5CgLo1i4Z1rI/LXnC8RTdZZxmpxC273gOECNPb2qW9jerijQAwA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHpOEk4wRRe8+XOD49ps98iZzl4U4078lTFmqufTj3eQAiBw262wqFt1AZUWcLtoA6F+KvHVEkwE97aLaycw/EDPYw=="}]},"_from":".","_npmVersion":"1.2.15","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.0":{"name":"object-keys","version":"0.1.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"devDependencies":{"tap":"~0.4.1","tape":"~0.3.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.0","dist":{"shasum":"f60a5d0b3f878089a4b9645b26e43df09436dbb8","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.0.tgz","integrity":"sha512-nmv/hFMWJmfEUuMUjE2m2ZDmwi4Q9RDeZto0S04PfD8wnwINgJT5Raib18UT/EAa/A3tIhpEPHewLX83OCRSzQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDvuUuQtK1x8psnRhTMpgczIFTWOSlTu0hV+851vuWrKQIgYdB+mrlzs3w/Bw74j9ju2BZEFbhiDMpRVvbTsHBJodc="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.1":{"name":"object-keys","version":"0.1.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"indexof":"~0.0.1","is-extended":"~0.0.4"},"devDependencies":{"tap":"~0.4.1","tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.1","dist":{"shasum":"e35f1c8d9cbc5fe503c1b13ad57c334e3f637b3e","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.1.tgz","integrity":"sha512-0YAQMhYdszhy3qw0CZHKp2/+pw0VIBSbb5G5oMItAXW384Qbi6XRg4J8Q9O8kg43WVcFyFUT+GCCTt/rz6890w==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBRuwzoWqmGMBA4CCDak783BfOsQ6ycfmHlEnZqC5gWnAiAKsWT0JVhP/+dBICcDXulO75XJTJjG4yGibvL1UpxT6A=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.2":{"name":"object-keys","version":"0.1.2","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"indexof":"~0.0.1","is-extended":"~0.0.5"},"devDependencies":{"tap":"~0.4.1","tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.2","dist":{"shasum":"df74e8662eb0e8b5ee64fc8eda750c2db4debc7b","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.2.tgz","integrity":"sha512-WMWSee5aYXB5Iu7bfsD3wSdO9TaYqwrIfqHWoQQHIx3XbvhslTBAyqY+tOp9DpaNGjE75vM9IhwMFbDcEs0Ntw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIHy+GeOlh7SUtU0NRAUk02ZvdJF+bUX7/XN9IrbewUc3AiEAmkuTbEEx+bYIAsEWhbMxFynWo+j5mtMl0weHc2vIqvE="}]},"_from":".","_npmVersion":"1.2.15","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.3":{"name":"object-keys","version":"0.1.3","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is-extended":"~0.0.5"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.3","dist":{"shasum":"201972597dfdbaef2512144a969351b67340966d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.3.tgz","integrity":"sha512-P40wNJQL4FoACelJjjI0N0iO3oRfiy0Pvym34FvBmJbArXAmIj0u8p8dLPFjKtN3Bikqb2I3kYJLjS2RnIP2KQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIEr/gqaTEuK7tXhDHdY5SxqAScNngeW1qXRGkAYdsqLEAiEAtFJBjZIJKSFL9yK6M4lUIVRqPyeLc4o5JBFufjryS9A="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.4":{"name":"object-keys","version":"0.1.4","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.0"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0","firefox/15.0..latest","firefox/nightly","chrome/4.0","chrome/22.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/5.0.5..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.4","dist":{"shasum":"094b203cdc23c0d61b04f13cc8135fe964cc314a","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.4.tgz","integrity":"sha512-EhLn1BDThRMKDUnB4a9Pu99R0V7FvciLi4M2Y7fyoa/qnl202sd4RhLuYCL6IfR0f133TaWpP4JgNPRpMBac6Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDCCt8XSKs0l1ykra1eRaTbBjfgyO/RAqXJZUWBu0LmewIhAP0IMkVWwACYSzm5FboLyb096r0WXAhEQaQS2m74C3E7"}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.5":{"name":"object-keys","version":"0.1.5","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.1"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.5","dist":{"shasum":"ff9b7518e468804c4066ac553c5d452ec8ffbb27","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.5.tgz","integrity":"sha512-FWlklzi/z7zzTVU/hnBrUUyiMRw894gIwpgUCkeFqWSXD/m3y7KUzbcWe6oJWPr+PEZ/ACLa/lDWLIQsYmY0ng==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBdydC523dr2nuIF2D77vsdvVS6m7etNCltX7XRKBrgKAiAzGtetgYZd9SXs1ixPt+EqqAMeabvo92SNOZHVRFqXbQ=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.6":{"name":"object-keys","version":"0.1.6","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.2"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.6","dist":{"shasum":"2d8d2c3e3f57979d08e56c5a72532750e8fc9aae","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.6.tgz","integrity":"sha512-0EVnJ1F9QV4PMwv/hVwr1Ww/4AmGuHC4Wk1TlkWzUjvxZOZsD/I3jYrP3Cj1wX0C4i5fmAlopJdnQ0XiHOup+Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIB42TE8/g3tYNAcNlrGjmxVF0slnQzgQqHN6Ozb32j5cAiAb8eJ+WLFiVR5jEpIgg7FddzSK6CcDOrZS7fhOEaEVAA=="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.7":{"name":"object-keys","version":"0.1.7","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.3"},"devDependencies":{"tape":"~0.3.3"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.7","dist":{"shasum":"fefce99868aeb040f357b3d3aa29ad26ec30bbd2","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.7.tgz","integrity":"sha512-q2+Sfmxqz5jDT7Ri0GZmZog2DCmsYzUo39+ESQFgE6AYSTITCZnrhp5thlTTWKxP0ilN23pvE5voVH2SAQp73Q==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQC0ZbU1lhEEgxMCvgRZhnhW4CeB2kRvMvEeAQGqoxfxHgIgHu6pVbufE1cs9nnihYjZrfi6oEN4sQDd1+IVeMnv9us="}]},"_from":".","_npmVersion":"1.2.14","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.1.8":{"name":"object-keys","version":"0.1.8","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.1.8","dist":{"shasum":"d40164df81104b0da49edfa6aba9dd29eb480293","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.1.8.tgz","integrity":"sha512-QVLwfAl2DJtsOVW8BXxa8g9gjzqwAJijFj/hTCOknQ5uIfonbZIEeX+asYCgq93HYkfcMkWL51H6z3XLwALVaw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIEstxBPiFXo98Vg3f5JaR5PY3HzLTVOKyPP3xJxIHeCHAiAJIjSWZJuR7zKuGcfALY9bv20LttxAEtWMngbhCcVELA=="}]},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.2.0":{"name":"object-keys","version":"0.2.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.1","indexof":"~0.0.1","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"_id":"object-keys@0.2.0","dist":{"shasum":"cddec02998b091be42bf1035ae32e49f1cb6ea67","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.2.0.tgz","integrity":"sha512-XODjdR2pBh/1qrjPcbSeSgEtKbYo7LqYNq64/TPuCf7j9SfDD3i21yatKoIy39yIWNvVM59iutfQQpCv1RfFzA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDHqI4i8TCwYU0W7hvKd5jX2WFPHuJ0kESFyw/as3++xgIgaKT/CU6g2wUXjGaGccKcj5U4akUaDasKizs8P3yDewE="}]},"_from":".","_npmVersion":"1.2.18","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.3.0":{"name":"object-keys","version":"0.3.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{"foreach":"~2.0.3","is":"~0.2.6"},"devDependencies":{"tape":"~1.0.2","indexof":"~0.0.1"},"testling":{"files":"test.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest"]},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"_id":"object-keys@0.3.0","dist":{"shasum":"4ce2945fee6669cf98424bbaa0f59c244ff97f1d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.3.0.tgz","integrity":"sha512-5NWmqk9N0NPSzhUAjJwjA1fbpYkmCyc3DRpIObOIsOTEz98JZg8fiJUbnxKofPrRXXW/J5Sh0M4pku7my7KHWw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIDt2X4Q2m0E/f+ITcYDQdhb9WZQobOe3l/s8X+WttvWEAiAn0ThWjlLuWOUW3FrAinp3k15grW86MXXMLNCLKBiOpg=="}]},"_from":".","_npmVersion":"1.2.21","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":"Please update to the latest object-keys"},"0.4.0":{"name":"object-keys","version":"0.4.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/kriskowal/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.3","is":"~0.2.6","tape":"~1.0.4","indexof":"~0.0.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"_id":"object-keys@0.4.0","dist":{"shasum":"28a6aae7428dd2c3a92f3d95f21335dd204e0336","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.4.0.tgz","integrity":"sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHUDMLh3fWS5OpydQINZqo8WFrJ3lqEJiDuN+YFRsxG3AiBbbYCG5+dD0UXyu+R6+L4BfEXZJeODELgzbKLRDEEWOw=="}]},"_from":".","_npmVersion":"1.3.5","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"deprecated":""},"0.5.0":{"name":"object-keys","version":"0.5.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js","coverage":"covert test/index.js","coverage-quiet":"covert test/index.js --quiet"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.2.7","tape":"~2.3.2","indexof":"~0.0.1","covert":"~0.3.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.5.0","dist":{"shasum":"09e211f3e00318afc4f592e36e7cdc10d9ad7293","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.5.0.tgz","integrity":"sha512-2GU36PPj0BVaGl9JDw1zY5vkLMV1hQ1QtI+PoBq7f5bZKY2j/7IO0uQDv0UcuBhimMYnditq7dz+uO9C1TXV4w==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDMwauwpEiIxU1RlG+eAIRnOLrboadeDQRORnvEQufqswIhANVI50TQxUwOhs2291FQ2NIdlE1uCKDjOx8jTVsGXZEn"}]},"_from":".","_npmVersion":"1.3.24","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"0.5.1":{"name":"object-keys","version":"0.5.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"node test/index.js","coverage":"covert test/index.js","coverage-quiet":"covert test/index.js --quiet"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.3.0","tape":"~2.10.2","indexof":"~0.0.1","covert":"~0.3.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.5.1","dist":{"shasum":"0eb20ffa0ce7c01977648681b42c515f297d2cc1","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.5.1.tgz","integrity":"sha512-VVh5OqHlY0N4Hueq9KteojSoj8BmEZeKC+nFyAmQFGF37dJSbcFB4jNhV7+6Xnn6t4t3jh0P0Cuy0hEA+xq+Mg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDoseRCnmntQ8ISi56+7YstplMr/rq01BE5OsLrcg/b6gIhAJKIVTCp4DmdXm8LavjKRr4lG/KH6m/RyVA72NGkvDSc"}]},"_from":".","_npmVersion":"1.4.3","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"0.6.0":{"name":"object-keys","version":"0.6.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~0.3.0","tape":"~2.13.3","indexof":"~0.0.1","covert":"~0.4.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"3cbf74b330bb04f263a96d59925db5704c08968c","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.6.0","_shasum":"4638690dfaf1e65a63d43b5855d2f6ce04aeef6d","_from":".","_npmVersion":"1.4.21","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"4638690dfaf1e65a63d43b5855d2f6ce04aeef6d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.6.0.tgz","integrity":"sha512-NwTyBxMHbTVCd46WsQlY4WMwYoJ+PXkIkU6x/S22usMJQewtKMrwPAV9jtB6HBXnL4+EzaXQrtllK0MPl+V4PQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIEAekrpsVGBhFATo6EM1rcjVMSHjzwnEm8OSWZY5YhYqAiEA1YnRXNwRpg9sHlQweTFu1/6zpLR4rTQ50u+odWruJ+o="}]},"directories":{}},"0.6.1":{"name":"object-keys","version":"0.6.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.0","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"cfa534edc801eef5a3fd01512b30b025d177a79a","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@0.6.1","_shasum":"ed8d052b3662b093c9ee00152c259815c0db4d3c","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"ed8d052b3662b093c9ee00152c259815c0db4d3c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-0.6.1.tgz","integrity":"sha512-yFH+vVBczUKglNkPAb96wIWXv1AqdR4PCdoL8fYt6+uqm/Ucn4G7NVOgI54GG6Pai8yswIqzZIz0kLq4/3egQQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDhFPCvJ7VZ/jc+5VfjHDrTEIbXXMDA+p7qQPbB7D7QhgIhAIpfJj4sqHZfgrQO3bYBRwxqahD1d23Zea/rSaIZhqJ+"}]},"directories":{}},"1.0.0":{"name":"object-keys","version":"1.0.0","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.0","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.5.8"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"f78356a5eda9b059acdc841607edbd3940aed477","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.0","_shasum":"1b66cc8cafc27391944098216726f746b15c2a30","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"1b66cc8cafc27391944098216726f746b15c2a30","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.0.tgz","integrity":"sha512-7zE2Pyy6jZ30PT8LSB/J+WfBvd8gw6PClm9Ilhq/S42rZ32NiDgBD0GtBDcmeObLtRIAC087WNyCW4QLAF/F1A==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQD4P78vN3qENInRoJidkqsanNRVgDGq1o1IDbclEaeAugIgGF9eS40md3HbBTo2TP+LbsCZhL+mjAqG91O7hwXfVPY="}]},"directories":{}},"1.0.1":{"name":"object-keys","version":"1.0.1","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"jscs test/*.js *.js"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.4","is":"~2.0.1","tape":"~2.14.0","indexof":"~0.0.1","covert":"~1.0.0","jscs":"~1.6.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"2ecbaaa0405c2f03e8b669ccf4b70376318a8f8b","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.1","_shasum":"55802e85842c26bbb5ebbc157abf3be302569ba8","_from":".","_npmVersion":"1.4.23","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"55802e85842c26bbb5ebbc157abf3be302569ba8","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.1.tgz","integrity":"sha512-DsJ69TA3wPICBmxYj6rij6uGKvKb9s2mtebzhuN/eI1GabJ3xC7fZ7PWjW0GS06hSclD0GxKGGAHQo5P7R2ZTg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQD4gYBznqPY/77jmrmzAiN5nRqHR25mrZuveDAAkyBi/wIhANnwIwT2H5eNkTIWUt3c+j4p5ovDyUM83vj0pvCHuFBL"}]},"directories":{}},"1.0.2":{"name":"object-keys","version":"1.0.2","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.5","is":"~2.2.0","tape":"~3.0.3","indexof":"~0.0.1","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.10.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"06f2d46a85a0be12fc9e0377e3ce7bef32be5eb3","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.2","_shasum":"810205bc58367a1d9dcf9e8b7b8c099ef2503c6c","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"810205bc58367a1d9dcf9e8b7b8c099ef2503c6c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.2.tgz","integrity":"sha512-QaJ3L+WfJ2mCirdIvDbXRW8q76+WnsITenRbpAAJ2Z/fPcKaXvRAn94rv1YzwUGqxj/m08vu3HBvR6WdxXXRsw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIAh4SRfmAsWSFsJGW/zwtEkL5i6WpjFvxOkmwnfDW/LTAiBr8+G5luLEkszDkl+ANwlTeCyO/PceL8aRv/UO/XqUtw=="}]},"directories":{}},"1.0.3":{"name":"object-keys","version":"1.0.3","author":{"name":"Jordan 
Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"~2.0.5","is":"~2.2.0","tape":"~3.0.3","indexof":"~0.0.1","covert":"1.0.0","jscs":"~1.9.0","editorconfig-tools":"~0.0.1","nsp":"~0.5.2","eslint":"~0.11.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"f0fc8ccdf81843fa7aa88c85777cf717c3ead129","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys","_id":"object-keys@1.0.3","_shasum":"1b679dbec65103da488edb32f782bd9a15e3de0a","_from":".","_npmVersion":"1.4.28","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"dist":{"shasum":"1b679dbec65103da488edb32f782bd9a15e3de0a","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.3.tgz","integrity":"sha512-C9AHglIN4DeikXJitZAmcls7Ics4QJr0QnVXFtK4wVly8zo0udlW96Hfw0kLQ0LqiE21Z2HgBMIS7C6/s4L2Tg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIGYBo/Zp8ilkQTBiGuDQvrpFHmLCZGxdimx6CQPuVK4PAiEAofwl6l/SVKlk89+QpAy6VRVczBPULX48M5hGH78V7Vc="}]},"directories":{}},"1.0.4":{"name":"object-keys","version":"1.0.4","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.1","eslint":"^0.21.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"fc869b3088d6047bcbf42e534304ffe034b06cb0","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.4","_shasum":"b115f96a7ee837df1517fbc5bd91ea965e37685c","_from":".","_npmVersion":"2.9.0","_nodeVersion":"2.0.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"b115f96a7ee837df1517fbc5bd91ea965e37685c","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.4.tgz","integrity":"sha512-+MtQIw3zdFntcjAKeWGPRbCj0SZeCSN1Yhp1jAI1GmPgF6wCHTJkhJgfPE3kHgryFpX2MgFWQLcKsqHlSlPD9A==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDKrpjpmWkxUnjvqfhVyWqDoQh7rExWokqaM7GWI3do6wIhAONj4OP7k0W21ye/Mzi92MX8ageuQydTsWFK4cB75Zik"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.5":{"name":"object-keys","version":"1.0.5","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^0.24.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"a6fb624febfdbde087b5637bedd5233054520b18","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.5","_shasum":"84fe12516867496e97796a49db0a89399053fe06","_from":".","_npmVersion":"2.11.3","_nodeVersion":"2.3.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"84fe12516867496e97796a49db0a89399053fe06","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.5.tgz","integrity":"sha512-ads8edXgDSXcILPLzQa0i8HaXMSPoCj1SYW8C+W+fL8cTIcpxp8M3/wFu4ODfegdiKP9LEatqLbcd7noEtoL2g==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIAdMlFebBqVB9aq9/VQPFrEI72Ai91euXsNekOZS67lKAiB6/Y3cE8bKzOVtc/erkWzyFt9rjyc4HBMVfRy2SN9V2A=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.6":{"name":"object-keys","version":"1.0.6","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^0.24.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"3b0fbe74b40b5d78661461339f09a82f45a0a345","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.6","_shasum":"f910c99bb3f57d8ba29b6580e1508eb0ebbfc177","_from":".","_npmVersion":"2.11.3","_nodeVersion":"2.3.3","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"f910c99bb3f57d8ba29b6580e1508eb0ebbfc177","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.6.tgz","integrity":"sha512-JFO9tB3N/R17IA/IVKb3K0amIIpaR5T7CSg9z47uRXOFv9Kw1LOm1t3NB6FjosNIuKqNwpExODZqNnJb8zIZgQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIHoYa/sXYRU7F0BxiOvK1r/U1E2lj0iXpqZwIHQMjc29AiA66y8fVi2dNIyHyihDxm0gL/8pAm04MHpq25c6K5c33g=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.7":{"name":"object-keys","version":"1.0.7","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.0.1","tape":"^4.0.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^1.13.1","editorconfig-tools":"^0.1.1","nsp":"^1.0.3","eslint":"^1.0.0-rc-1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"c0c183e0aaed86487218f46127fcebec9258e84e","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.7","_shasum":"e7d117261139d6acac8f0afabf261d700ebb0b93","_from":".","_npmVersion":"2.13.0","_nodeVersion":"2.4.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"e7d117261139d6acac8f0afabf261d700ebb0b93","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.7.tgz","integrity":"sha512-SLdJAA8lTumufd2VJDOEXwfb81eE/ujQccVmFsofTnoPv1RvHqSlrMjDkq06lTaqnJxCDaY3d8rUwUJIeFk5sA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCkZcFwsc8+AngNfDPYrHnGBGkQHFSyW+hmnv8O33Ng3AIgSwHqYxg+rHqOlyoYbs/OqwzctpcSXfGjgbwpsa34dDw="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.8":{"name":"object-keys","version":"1.0.8","author":{"name":"Jordan Harband"},"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.2.1","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^2.3.1","editorconfig-tools":"^0.1.1","nsp":"^1.1.0","eslint":"^1.6.0","@ljharb/eslint-config":"^1.3.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"f094a4832583959d0a0a132ea80efa2f44a5d58e","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.8","_shasum":"9a71ce236e200a943d7fbddba25332fba057c205","_from":".","_npmVersion":"2.14.7","_nodeVersion":"4.2.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"9a71ce236e200a943d7fbddba25332fba057c205","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.8.tgz","integrity":"sha512-yMyMdHyEjnPMnRpKnwOQLtTcS/2DQCItvwFh/A0RFvorh1aWqsIO46ZzfkaT0CmPXcKjCtrq7DhZo+unsR99hA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIFHd2Fd8fYxiukgf0PCCQ4pAuKxhEwsecMScXYwTs/ntAiEA4b6t2m3zTXFek1FJZm3TEuhYZFwPhYCknyORUWHNXXo="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.9":{"name":"object-keys","version":"1.0.9","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"test":"npm run lint && node test/index.js && npm run security","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run jscs && npm run eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","eccheck":"editorconfig-tools check *.js **/*.js > /dev/null","security":"nsp package"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.2.1","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^2.3.4","editorconfig-tools":"^0.1.1","nsp":"^1.1.0","eslint":"^1.7.2","@ljharb/eslint-config":"^1.4.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"e4331f920ff49824ad999b3449005349e31139f9","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.9","_shasum":"cabb1202d9a7af29b50edface8094bb46da5ea21","_from":".","_npmVersion":"2.14.7","_nodeVersion":"4.2.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"cabb1202d9a7af29b50edface8094bb46da5ea21","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.9.tgz","integrity":"sha512-xRGFTKkyFuP9AilRkEw4KfMPqaD9spcc6PVVPiOxAau61l+m/4zHUW6crXGtSt8lBfXD2vgnqNFFY8cr8NOBTQ==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIBJQxUXhh6hPZURj6mH0fOxfW7ePLUq0TXl/tfNBFT3aAiAsoqWESRjpTfRmFkKnYvuJgqI1ovXv42EHD2LzxAs71A=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{}},"1.0.10":{"name":"object-keys","version":"1.0.10","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.6.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^3.0.6","nsp":"^2.5.0","eslint":"^3.0.0","@ljharb/eslint-config":"^6.0.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"a12ae2c01a443afb43414ab844175d2b6d5cd50a","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.10","_shasum":"57e67f7041b66d145c45136fa8040a32717f7465","_from":".","_npmVersion":"3.9.5","_nodeVersion":"6.2.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"57e67f7041b66d145c45136fa8040a32717f7465","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.10.tgz","integrity":"sha512-fKnqZ/+BvdAsCto14RQRo1q0W9ObXswVgq2Vc/y/OQXfGVom9jEJ193KpHjgkO7QJNCxy8hBWTDBYUsSBExYFA==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCUJa2v4dH/fEuWBmVFTYGyt6k+uRH9k63SnIhS07UPggIgarq6DuufB4ttn5xtTjnxChN0qzqHsyhVN2mhkaF1IBk="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-16-east.internal.npmjs.com","tmp":"tmp/object-keys-1.0.10.tgz_1467655315616_0.8326317083556205"},"directories":{}},"1.0.11":{"name":"object-keys","version":"1.0.11","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"foreach":"^2.0.5","is":"^3.1.0","tape":"^4.6.0","indexof":"^0.0.1","covert":"^1.1.0","jscs":"^3.0.6","nsp":"^2.5.0","eslint":"^3.0.0","@ljharb/eslint-config":"^6.0.0"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 
0.4"},"gitHead":"3f869cc4b9f0f0489b2af7e80964f90d6c4403a4","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.11","_shasum":"c54601778ad560f1142ce0e01bcca8b56d13426d","_from":".","_npmVersion":"3.9.5","_nodeVersion":"6.2.2","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"shasum":"c54601778ad560f1142ce0e01bcca8b56d13426d","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.11.tgz","integrity":"sha512-I0jUsqFqmQFOIhQQFlW8QDuX3pVqUWkiiavYj8+TBiS7m+pM9hPCxSnYWqL1hHMBb7BbQ2HidT+6CZ8/BT/ilw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEQCIGVatYL5nqFjnyTPO0/FYHebFDZUNL6H4evuOwJXOd20AiAVQtHX+GpfjVa90v7F8y+Z0Nkf/bKGSVeNf/Sqys+gRg=="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmOperationalInternal":{"host":"packages-16-east.internal.npmjs.com","tmp":"tmp/object-keys-1.0.11.tgz_1467740975903_0.8028358130250126"},"directories":{}},"1.0.12":{"name":"object-keys","version":"1.0.12","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent security","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"npm run --silent jscs && npm run --silent eslint","jscs":"jscs test/*.js *.js","eslint":"eslint test/*.js *.js","security":"nsp check"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^12.2.1","covert":"^1.1.0","eslint":"^4.19.1","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.2.1","jscs":"^3.0.7","nsp":"^3.2.1","tape":"^4.9.1"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"e3acd057c5b7be1029b3b9f6f69133292d77d558","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.0.12","_npmVersion":"6.1.0","_nodeVersion":"10.4.1","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==","shasum":"09c53855377575310cca62f55bb334abff7b3ed2","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.0.12.tgz","fileCount":11,"unpackedSize":28233,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJbKChkCRA9TVsSAnZWagAAVoIP/1jqNQKOVOx4jOpaSivw\nSRxdpzt236t7t9D5YnDgXhT5hrfbfajBz59CVtikezyYS3+ccurC/M2fTHno\nAT8VGxLmemptEoH7woqX27cFdWDFqyMlyfYZkC29w59+cXq44+J3+VFtyd8s\nV09lwj934D/DxdSCKZ/BVuZoffigow37yg7kIC9+VVS0em2XG3W633V8LQAF\nrRiUVSk0ne/BlO1TWV5fTmPQwranmUMnodZAqarVn2/vl0wN8rCTM9qGHdGH\nYWJNQC0ed73ZWOJN+C+OeQqtRdmjS/s5MbLrnMC7JdSQqEDFr6cuLf6TXYa8\nQmy4MCwN7IN1+XeUbDLsOQ1NdjIg9TVlybL5HjKiBjL5FYcjiZQHvtLYTOLa\n/x4eteDcVF8WObCLsUfrB3XuwH2sJX1tACds7IalOS0WLR2bHeBGjejQFyKK\n6k8strtCWMxaWt/nRSTOpZZfMz/HMtHmqVJ3C/VZGYvoexpt6EXqZm4Yemtx\n7AS82sEnfnKF92m/EXZbdP5Gz0fnAksKtzOncsFCOk7qomkD0PLNZkhIadfX\nWTOz9FW+gQNA+im76POpLk8EwQBFYIfTQesLVYB243Z3jH6O5EuTTkzclkWU\nVNXHIoouGL3S+1gPactA2lr6PM4G0hTkco98HSEvHMZpdgoHtz4Jx8xMtX6N\nGJLk\r\n=/f2l\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQDBTqK5jg1fMSZC7viYJn8AgFqfaNKFJrUlPYMQVnIReQIgUtgVmrbrFyXy3Qupn7eBPqwBkzKQXN6D+aQtkn5/tEk="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.0.12_1529358434802_0.4383878957043432"},"_hasShrinkwrap":false},"1.1.0":{"name":"object-keys","version":"1.1.0","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent audit","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"eslint .","preaudit":"npm install --package-lock --package-lock-only","audit":"npm audit","postaudit":"rm package-lock.json"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^13.1.1","covert":"^1.1.1","eslint":"^5.13.0","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.3.0","tape":"^4.9.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"abd4ff039708a0166a57388b348730cbda4a1593","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.1.0","_npmVersion":"6.5.0","_nodeVersion":"11.8.0","_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"dist":{"integrity":"sha512-6OO5X1+2tYkNyNEx6TsCxEqFfRWaqx6EtMiSbGrw8Ob8v9Ne+Hl8rBAgLBZn5wjEz3s/s6U1WXFUFOcxxAwUpg==","shasum":"11bd22348dd2e096a045ab06f6c85bcc340fa032","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.1.0.tgz","fileCount":11,"unpackedSize":26395,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJcYQZGCRA9TVsSAnZWagAAu9oP/3ed1S6D9BtHLFImT804\nudV4Mm/anfUE0jBXkJXLab4+qVIgkkqQOnEIg/Wl1ea/UHN/r21cRAaxVcdx\nqH6c5bcRpIr5gWNpcXnCgUVzOZHz2woa0jgZ4YQsAvt67m1lPAZBSppp98tx\nvw//RLPRQB0MNppFXRMLQEMABvIyP7bvNy6wK/SHp21hLuxzKdEmkwYwgKDN\nlhKdtpJQDrRjAmD6w2i8GW71S4K+Kis/ugzDGYX5eM4iwxJYQaKhtsgRJe34\nohUKHsVDe54wAYo4ZfJp+oJFLdJoY5DtYZM8VRkFlIya8X339oietwezlVoB\n0t3/8LuwJJZrG3tprTl6ek4d2AFL2Mf/xJhxSwKLY8B6UpFD+yNn4PuUt70u\ndea9T4zC9KE0swRdxLNRkKDTexCMlq3I3LZ28rH5MyPfcgdZMs1v9tGlaWhL\n7lXxQ5DIq/MSoMDKgOh8T0OBbvapnIfb6f1cqJgreZ0W0mpTm2Fu6joBfJKp\n7rp4erjunUXk4vNTsUxB9R/DMJgiefPRf7XPMG7evhO+MLlh3380Hf5DTf6E\nAqLqOZnAYLBzjigMxK8C3F0jxLVTFuwjA0Z3qejpWgwdl7RF7qyxekeSI1/g\n02ewe6HLdJilSbcev4i1zF8IKjGXFMoCWkVhpiz/q4DlNJbIv25C1482NbrF\nw+qE\r\n=/QeE\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCICDx3IvajHE/2cYYP/a/E25efhfP7DH10u0fg5s8927jAiEAmE9cIJNUB1ec8+cg2AkIDXhtz8ipkE80rlnTC4g6Pps="}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.1.0_1549862469286_0.18723271962423693"},"_hasShrinkwrap":false},"1.1.1":{"name":"object-keys","version":"1.1.1","author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"contributors":[{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}],"description":"An Object.keys replacement, in case Object.keys is not available. 
From https://github.com/es-shims/es5-shim","license":"MIT","main":"index.js","scripts":{"pretest":"npm run --silent lint","test":"npm run --silent tests-only","posttest":"npm run --silent audit","tests-only":"node test/index.js","coverage":"covert test/*.js","coverage-quiet":"covert test/*.js --quiet","lint":"eslint .","preaudit":"npm install --package-lock --package-lock-only","audit":"npm audit","postaudit":"rm package-lock.json"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"keywords":["Object.keys","keys","ES5","shim"],"dependencies":{},"devDependencies":{"@ljharb/eslint-config":"^13.1.1","covert":"^1.1.1","eslint":"^5.13.0","foreach":"^2.0.5","indexof":"^0.0.1","is":"^3.3.0","tape":"^4.9.2"},"testling":{"files":"test/index.js","browsers":["iexplore/6.0..latest","firefox/3.0..6.0","firefox/15.0..latest","firefox/nightly","chrome/4.0..10.0","chrome/20.0..latest","chrome/canary","opera/10.0..latest","opera/next","safari/4.0..latest","ipad/6.0..latest","iphone/6.0..latest","android-browser/4.2"]},"engines":{"node":">= 0.4"},"gitHead":"ba2c1989270c7de969aa8498fc3b7c8e677806f3","bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"homepage":"https://github.com/ljharb/object-keys#readme","_id":"object-keys@1.1.1","_nodeVersion":"11.13.0","_npmVersion":"6.7.0","dist":{"integrity":"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==","shasum":"1c47f272df277f3b1daf061677d9c82e2322c60e","tarball":"http://localhost:4545/npm/registry/object-keys/object-keys-1.1.1.tgz","fileCount":11,"unpackedSize":26544,"npm-signature":"-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: 
https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJcqWC7CRA9TVsSAnZWagAApOoQAIGgpq1xnaDCEe3hqJFt\n1fjwFbEKHTyK59hA/zVmvvR4ikMeltZc5KAIKNt5XyDBO4NtuRcA7E1b3D7C\nFX/IMtPwIq5OvZLWhEnGBNTmwlVq8PI9DwZ6AE2hWM4JAmkT5tay7QtjDAur\nYRdTEEB3eqWETNiaybnF9d1GLKuH4dKcM/v9yiHMp+qa9Ivpe9VtWRj7WTr+\nkxc39JZdSVGFbVYNCFkZ8oyj5VbLOtyMB++6JxbR9fYlZ06ibmT+XrFsz7CF\nr7hQ/XFHlyodg0pi34+YhlyDAsPIvk8DOxDoKGs4aFZ6EqZm3hVnWaAlKqgX\n3ikZAT9Z/4d9icoRkEhVMj7INySL4bSd7lFDIlwGruc4j6U6b6phhwgIlhQE\nMsnWmnLL7/AAaPB8oiNhb8Lt/9/jRJsAHwRBRH9NN/DH2VyP0F2hzp66L5dF\niIVw9YUIBCOzfRg5Gr0qd0GCGbIefcq4AomxsJEdBbV+3AFkJvj7dibMrSb7\nJcBVC/TwJCjNv+Ols7VZE+Yj6ZYbNrsuh5KbPkdFchg6qNgds1Dh1tH8GwrJ\nULdSyACz/0stHNGr8p+Boa85mDseApgozr42UUHdEQyohO1/meNonjDGJl9w\ncLlcHMcR7hnLzp4v54jcv+q74EDZa15iEk/ckLdYFoUXXhRhBJo0XyG43zf8\nEO6O\r\n=PzOq\r\n-----END PGP SIGNATURE-----\r\n","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQDEXnivOzyezLnJgG1VUzWQj/PurnTkz1ZGYX4uYuK0JgIhAKKg/wGdPhn67UneiElcVANcbwapE+3GGhHJftwDOncL"}]},"maintainers":[{"name":"ljharb","email":"ljharb@gmail.com"}],"_npmUser":{"name":"ljharb","email":"ljharb@gmail.com"},"directories":{},"_npmOperationalInternal":{"host":"s3://npm-registry-packages","tmp":"tmp/object-keys_1.1.1_1554604218505_0.17078310534837748"},"_hasShrinkwrap":false}},"readme":"#object-keys [![Version Badge][npm-version-svg]][package-url]\n\n[![Build Status][travis-svg]][travis-url]\n[![dependency status][deps-svg]][deps-url]\n[![dev dependency status][dev-deps-svg]][dev-deps-url]\n[![License][license-image]][license-url]\n[![Downloads][downloads-image]][downloads-url]\n\n[![npm badge][npm-badge-png]][package-url]\n\n[![browser support][testling-svg]][testling-url]\n\nAn Object.keys shim. 
Invoke its \"shim\" method to shim Object.keys if it is unavailable.\n\nMost common usage:\n```js\nvar keys = Object.keys || require('object-keys');\n```\n\n## Example\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\nvar obj = {\n\ta: true,\n\tb: true,\n\tc: true\n};\n\nassert.deepEqual(keys(obj), ['a', 'b', 'c']);\n```\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\n/* when Object.keys is not present */\ndelete Object.keys;\nvar shimmedKeys = keys.shim();\nassert.equal(shimmedKeys, keys);\nassert.deepEqual(Object.keys(obj), keys(obj));\n```\n\n```js\nvar keys = require('object-keys');\nvar assert = require('assert');\n/* when Object.keys is present */\nvar shimmedKeys = keys.shim();\nassert.equal(shimmedKeys, Object.keys);\nassert.deepEqual(Object.keys(obj), keys(obj));\n```\n\n## Source\nImplementation taken directly from [es5-shim][es5-shim-url], with modifications, including from [lodash][lodash-url].\n\n## Tests\nSimply clone the repo, `npm install`, and run `npm test`\n\n[package-url]: https://npmjs.org/package/object-keys\n[npm-version-svg]: http://versionbadg.es/ljharb/object-keys.svg\n[travis-svg]: https://travis-ci.org/ljharb/object-keys.svg\n[travis-url]: https://travis-ci.org/ljharb/object-keys\n[deps-svg]: https://david-dm.org/ljharb/object-keys.svg\n[deps-url]: https://david-dm.org/ljharb/object-keys\n[dev-deps-svg]: https://david-dm.org/ljharb/object-keys/dev-status.svg\n[dev-deps-url]: https://david-dm.org/ljharb/object-keys#info=devDependencies\n[testling-svg]: https://ci.testling.com/ljharb/object-keys.png\n[testling-url]: https://ci.testling.com/ljharb/object-keys\n[es5-shim-url]: https://github.com/es-shims/es5-shim/blob/master/es5-shim.js#L542-589\n[lodash-url]: https://github.com/lodash/lodash\n[npm-badge-png]: https://nodei.co/npm/object-keys.png?downloads=true&stars=true\n[license-image]: http://img.shields.io/npm/l/object-keys.svg\n[license-url]: LICENSE\n[downloads-image]: 
http://img.shields.io/npm/dm/object-keys.svg\n[downloads-url]: http://npm-stat.com/charts.html?package=object-keys\n\n","maintainers":[{"email":"ljharb@gmail.com","name":"ljharb"}],"time":{"modified":"2022-06-22T16:42:21.741Z","created":"2013-03-29T20:44:12.281Z","0.0.1":"2013-03-29T20:44:12.881Z","0.0.2":"2013-03-30T16:13:52.880Z","0.1.0":"2013-03-30T20:58:48.065Z","0.1.1":"2013-04-02T06:16:54.290Z","0.1.2":"2013-04-03T16:43:21.243Z","0.1.3":"2013-04-08T01:18:51.713Z","0.1.4":"2013-04-09T00:47:37.900Z","0.1.5":"2013-04-14T12:27:20.913Z","0.1.6":"2013-04-17T07:18:02.522Z","0.1.7":"2013-04-18T02:23:24.367Z","0.1.8":"2013-05-10T17:32:12.476Z","0.2.0":"2013-05-10T18:52:03.655Z","0.3.0":"2013-05-18T22:06:13.036Z","0.4.0":"2013-08-14T08:10:10.483Z","0.5.0":"2014-01-30T09:28:17.465Z","0.5.1":"2014-03-10T06:43:32.469Z","0.6.0":"2014-08-01T07:22:33.482Z","0.6.1":"2014-08-26T05:51:23.007Z","1.0.0":"2014-08-26T19:21:11.757Z","1.0.1":"2014-09-03T07:19:08.654Z","1.0.2":"2014-12-28T09:03:12.859Z","1.0.3":"2015-01-06T22:27:00.343Z","1.0.4":"2015-05-23T20:19:48.735Z","1.0.5":"2015-07-03T23:43:33.872Z","1.0.6":"2015-07-09T15:41:54.153Z","1.0.7":"2015-07-18T19:23:11.235Z","1.0.8":"2015-10-14T22:21:16.304Z","1.0.9":"2015-10-19T22:07:23.370Z","1.0.10":"2016-07-04T18:01:59.134Z","1.0.11":"2016-07-05T17:49:39.399Z","1.0.12":"2018-06-18T21:47:14.916Z","1.1.0":"2019-02-11T05:21:09.393Z","1.1.1":"2019-04-07T02:30:18.674Z"},"author":{"name":"Jordan Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},"repository":{"type":"git","url":"git://github.com/ljharb/object-keys.git"},"users":{"claudiopro":true,"brostoch":true,"rocket0191":true},"readmeFilename":"README.md","homepage":"https://github.com/ljharb/object-keys#readme","keywords":["Object.keys","keys","ES5","shim"],"bugs":{"url":"https://github.com/ljharb/object-keys/issues"},"license":"MIT","contributors":[{"name":"Jordan 
Harband","email":"ljharb@gmail.com","url":"http://ljharb.codes"},{"name":"Raynos","email":"raynos2@gmail.com"},{"name":"Nathan Rajlich","email":"nathan@tootallnate.net"},{"name":"Ivan Starkov","email":"istarkov@gmail.com"},{"name":"Gary Katsevman","email":"git@gkatsev.com"}]} \ No newline at end of file diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 7cfd0ced79..9d3c6fccbd 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -81,11 +81,7 @@ pub fn write_file_2>( /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { - let path = path.canonicalize()?; - #[cfg(windows)] - return Ok(strip_unc_prefix(path)); - #[cfg(not(windows))] - return Ok(path); + Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) } /// Canonicalizes a path which might be non-existent by going up the @@ -117,47 +113,6 @@ pub fn canonicalize_path_maybe_not_exists( } } -#[cfg(windows)] -fn strip_unc_prefix(path: PathBuf) -> PathBuf { - use std::path::Component; - use std::path::Prefix; - - let mut components = path.components(); - match components.next() { - Some(Component::Prefix(prefix)) => { - match prefix.kind() { - // \\?\device - Prefix::Verbatim(device) => { - let mut path = PathBuf::new(); - path.push(format!(r"\\{}\", device.to_string_lossy())); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - // \\?\c:\path - Prefix::VerbatimDisk(_) => { - let mut path = PathBuf::new(); - path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); - path.extend(components); - path - } - // \\?\UNC\hostname\share_name\path - Prefix::VerbatimUNC(hostname, share_name) => { - let mut path = PathBuf::new(); - path.push(format!( - r"\\{}\{}\", - hostname.to_string_lossy(), - share_name.to_string_lossy() - )); - path.extend(components.filter(|c| !matches!(c, Component::RootDir))); - path - } - _ => path, - } - } - _ => path, - } -} - pub fn resolve_from_cwd(path: &Path) -> Result { let 
resolved_path = if path.is_absolute() { path.to_owned() @@ -921,41 +876,6 @@ mod tests { assert_eq!(result, expected); } - #[cfg(windows)] - #[test] - fn test_strip_unc_prefix() { - run_test(r"C:\", r"C:\"); - run_test(r"C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\?\C:\", r"C:\"); - run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); - - run_test(r"\\.\C:\", r"\\.\C:\"); - run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); - - run_test(r"\\?\UNC\localhost\", r"\\localhost"); - run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); - run_test( - r"\\?\UNC\localhost\c$\Windows\file.txt", - r"\\localhost\c$\Windows\file.txt", - ); - run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); - - run_test(r"\\?\server1", r"\\server1"); - run_test(r"\\?\server1\e$\", r"\\server1\e$\"); - run_test( - r"\\?\server1\e$\test\file.txt", - r"\\server1\e$\test\file.txt", - ); - - fn run_test(input: &str, expected: &str) { - assert_eq!( - strip_unc_prefix(PathBuf::from(input)), - PathBuf::from(expected) - ); - } - } - #[tokio::test] async fn lax_fs_lock() { let temp_dir = TempDir::new(); diff --git a/core/lib.rs b/core/lib.rs index cb16c26548..1b2841a661 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -17,6 +17,7 @@ mod ops; mod ops_builtin; mod ops_builtin_v8; mod ops_metrics; +mod path; mod realm; mod resources; mod runtime; @@ -101,6 +102,7 @@ pub use crate::ops_builtin::op_resources; pub use crate::ops_builtin::op_void_async; pub use crate::ops_builtin::op_void_sync; pub use crate::ops_metrics::OpsTracker; +pub use crate::path::strip_unc_prefix; pub use crate::realm::JsRealm; pub use crate::resources::AsyncResult; pub use crate::resources::Resource; diff --git a/core/path.rs b/core/path.rs new file mode 100644 index 0000000000..fd8b1a9b64 --- /dev/null +++ b/core/path.rs @@ -0,0 +1,91 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +use std::path::PathBuf; + +#[cfg(not(windows))] +#[inline] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + path +} + +/// Strips the unc prefix (ex. \\?\) from Windows paths. +#[cfg(windows)] +pub fn strip_unc_prefix(path: PathBuf) -> PathBuf { + use std::path::Component; + use std::path::Prefix; + + let mut components = path.components(); + match components.next() { + Some(Component::Prefix(prefix)) => { + match prefix.kind() { + // \\?\device + Prefix::Verbatim(device) => { + let mut path = PathBuf::new(); + path.push(format!(r"\\{}\", device.to_string_lossy())); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + // \\?\c:\path + Prefix::VerbatimDisk(_) => { + let mut path = PathBuf::new(); + path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", "")); + path.extend(components); + path + } + // \\?\UNC\hostname\share_name\path + Prefix::VerbatimUNC(hostname, share_name) => { + let mut path = PathBuf::new(); + path.push(format!( + r"\\{}\{}\", + hostname.to_string_lossy(), + share_name.to_string_lossy() + )); + path.extend(components.filter(|c| !matches!(c, Component::RootDir))); + path + } + _ => path, + } + } + _ => path, + } +} + +#[cfg(test)] +mod test { + #[cfg(windows)] + #[test] + fn test_strip_unc_prefix() { + use std::path::PathBuf; + + run_test(r"C:\", r"C:\"); + run_test(r"C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\?\C:\", r"C:\"); + run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt"); + + run_test(r"\\.\C:\", r"\\.\C:\"); + run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt"); + + run_test(r"\\?\UNC\localhost\", r"\\localhost"); + run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$"); + run_test( + r"\\?\UNC\localhost\c$\Windows\file.txt", + r"\\localhost\c$\Windows\file.txt", + ); + run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json"); + + run_test(r"\\?\server1", r"\\server1"); + run_test(r"\\?\server1\e$\", r"\\server1\e$\"); + run_test( + 
r"\\?\server1\e$\test\file.txt", + r"\\server1\e$\test\file.txt", + ); + + fn run_test(input: &str, expected: &str) { + assert_eq!( + super::strip_unc_prefix(PathBuf::from(input)), + PathBuf::from(expected) + ); + } + } +} diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 49d113c011..4bdbf49432 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -647,15 +647,7 @@ fn metadata_to_fsstat(metadata: fs::Metadata) -> FsStat { } fn realpath(path: impl AsRef) -> FsResult { - let canonicalized_path = path.as_ref().canonicalize()?; - #[cfg(windows)] - let canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - Ok(canonicalized_path) + Ok(deno_core::strip_unc_prefix(path.as_ref().canonicalize()?)) } fn read_dir(path: impl AsRef) -> FsResult> { diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 34eac84759..1c8647bab7 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -287,15 +287,7 @@ where let path = PathBuf::from(request); ensure_read_permission::(state, &path)?; let fs = state.borrow::>(); - let mut canonicalized_path = fs.canonicalize(&path)?; - if cfg!(windows) { - canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - } + let canonicalized_path = deno_core::strip_unc_prefix(fs.canonicalize(&path)?); Ok(canonicalized_path.to_string_lossy().to_string()) } diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 8fbe5078cf..ce7312ee85 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -861,9 +861,11 @@ Module.prototype.load = function (filename) { throw Error("Module already loaded"); } - this.filename = filename; + // Canonicalize the path so it's not pointing to the symlinked directory + // in `node_modules` directory of the referrer. 
+ this.filename = ops.op_require_real_path(filename); this.paths = Module._nodeModulePaths( - pathDirname(filename), + pathDirname(this.filename), ); const extension = findLongestRegisteredExtension(filename); // allow .mjs to be overriden diff --git a/runtime/fs_util.rs b/runtime/fs_util.rs index a29a57b397..eb4a2f8997 100644 --- a/runtime/fs_util.rs +++ b/runtime/fs_util.rs @@ -10,16 +10,7 @@ use std::path::PathBuf; /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { - let mut canonicalized_path = path.canonicalize()?; - if cfg!(windows) { - canonicalized_path = PathBuf::from( - canonicalized_path - .display() - .to_string() - .trim_start_matches("\\\\?\\"), - ); - } - Ok(canonicalized_path) + Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) } #[inline] From cf893741c3206f55eaac1999f50f1018122f7b85 Mon Sep 17 00:00:00 2001 From: Yarden Shoham Date: Tue, 2 May 2023 09:05:10 +0300 Subject: [PATCH 104/320] fix(ext/node): add missing `release` property to node's `process` (#18923) --- ext/node/polyfills/process.ts | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/ext/node/polyfills/process.ts b/ext/node/polyfills/process.ts index 5e45fecfda..d2f220734f 100644 --- a/ext/node/polyfills/process.ts +++ b/ext/node/polyfills/process.ts @@ -331,6 +331,17 @@ class Process extends EventEmitter { super(); } + /** https://nodejs.org/api/process.html#processrelease */ + get release() { + return { + name: "node", + sourceUrl: + `https://nodejs.org/download/release/${version}/node-${version}.tar.gz`, + headersUrl: + `https://nodejs.org/download/release/${version}/node-${version}-headers.tar.gz`, + }; + } + /** https://nodejs.org/api/process.html#process_process_arch */ get arch() { if (!arch) { From 49eb887cc6325e14211ec4a241ffc4ac98f0f7a9 Mon Sep 17 00:00:00 2001 From: Kenta Moriuchi Date: Tue, 2 May 2023 19:15:45 +0900 Subject: [PATCH 105/320] refactor(core): Use `ObjectHasOwn` instead 
of `ObjectPrototypeHasOwnProperty` (#18952) ES2022 `Object.hasOwn` can be used in snapshot, so I migrate to use it. --- cli/js/40_testing.js | 4 ++-- core/internal.d.ts | 3 ++- ext/console/01_console.js | 12 ++++++------ ext/crypto/00_crypto.js | 6 +++--- ext/fetch/20_headers.js | 4 ++-- ext/ffi/00_ffi.js | 4 ++-- ext/node/polyfills/01_require.js | 4 ++-- ext/node/polyfills/internal/child_process.ts | 4 ++-- ext/node/polyfills/internal/primordials.mjs | 2 +- ext/webidl/00_webidl.js | 6 +++--- 10 files changed, 25 insertions(+), 24 deletions(-) diff --git a/cli/js/40_testing.js b/cli/js/40_testing.js index 8afcb74ee2..e269b9c9f2 100644 --- a/cli/js/40_testing.js +++ b/cli/js/40_testing.js @@ -21,7 +21,7 @@ const { MapPrototypeSet, MathCeil, ObjectKeys, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, Promise, SafeArrayIterator, @@ -166,7 +166,7 @@ function assertOps(fn) { const details = []; for (const key in post.ops) { - if (!ObjectPrototypeHasOwnProperty(post.ops, key)) { + if (!ObjectHasOwn(post.ops, key)) { continue; } const preOp = pre.ops[key] ?? 
diff --git a/core/internal.d.ts b/core/internal.d.ts index c78310aeb6..b09d188d8f 100644 --- a/core/internal.d.ts +++ b/core/internal.d.ts @@ -637,7 +637,6 @@ declare namespace __bootstrap { export const Object: typeof globalThis.Object; export const ObjectLength: typeof Object.length; export const ObjectName: typeof Object.name; - export const ObjectPrototype: typeof Object.prototype; export const ObjectAssign: typeof Object.assign; export const ObjectGetOwnPropertyDescriptor: typeof Object.getOwnPropertyDescriptor; @@ -646,6 +645,7 @@ declare namespace __bootstrap { export const ObjectGetOwnPropertyNames: typeof Object.getOwnPropertyNames; export const ObjectGetOwnPropertySymbols: typeof Object.getOwnPropertySymbols; + export const ObjectHasOwn: typeof Object.hasOwn; export const ObjectIs: typeof Object.is; export const ObjectPreventExtensions: typeof Object.preventExtensions; export const ObjectSeal: typeof Object.seal; @@ -662,6 +662,7 @@ declare namespace __bootstrap { export const ObjectEntries: typeof Object.entries; export const ObjectFromEntries: typeof Object.fromEntries; export const ObjectValues: typeof Object.values; + export const ObjectPrototype: typeof Object.prototype; export const ObjectPrototype__defineGetter__: UncurryThis< typeof Object.prototype.__defineGetter__ >; diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 3b2f449178..31431f120a 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -68,10 +68,10 @@ const { ObjectGetOwnPropertyNames, ObjectGetOwnPropertySymbols, ObjectGetPrototypeOf, + ObjectHasOwn, ObjectIs, ObjectKeys, ObjectPrototype, - ObjectPrototypeHasOwnProperty, ObjectPrototypeIsPrototypeOf, ObjectPrototypePropertyIsEnumerable, ObjectPrototypeToString, @@ -710,7 +710,7 @@ function formatValue( } function getClassBase(value, constructor, tag) { - const hasName = ObjectPrototypeHasOwnProperty(value, "name"); + const hasName = ObjectHasOwn(value, "name"); const name = (hasName && 
value.name) || "(anonymous)"; let base = `class ${name}`; if (constructor !== "Function" && constructor !== null) { @@ -1148,7 +1148,7 @@ function addPrototypeProperties( // Ignore the `constructor` property and keys that exist on layers above. if ( key === "constructor" || - ObjectPrototypeHasOwnProperty(main, key) || + ObjectHasOwn(main, key) || (depth !== 0 && SetPrototypeHas(keySet, key)) ) { continue; @@ -1315,7 +1315,7 @@ function formatArray(ctx, value, recurseTimes) { const output = []; for (let i = 0; i < len; i++) { // Special handle sparse arrays. - if (!ObjectPrototypeHasOwnProperty(value, i)) { + if (!ObjectHasOwn(value, i)) { return formatSpecialArray(ctx, value, recurseTimes, len, output, i); } ArrayPrototypePush( @@ -2291,7 +2291,7 @@ function hasOwnProperty(obj, v) { if (obj == null) { return false; } - return ObjectPrototypeHasOwnProperty(obj, v); + return ObjectHasOwn(obj, v); } // Copyright Joyent, Inc. and other Node contributors. MIT license. @@ -3603,7 +3603,7 @@ function wrapConsole(consoleFromDeno, consoleFromV8) { const keys = ObjectKeys(consoleFromV8); for (let i = 0; i < keys.length; ++i) { const key = keys[i]; - if (ObjectPrototypeHasOwnProperty(consoleFromDeno, key)) { + if (ObjectHasOwn(consoleFromDeno, key)) { consoleFromDeno[key] = FunctionPrototypeBind( callConsole, consoleFromDeno, diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 5be2e0c1c2..1008f4cf6d 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -27,7 +27,7 @@ const { JSONStringify, MathCeil, ObjectAssign, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, SafeArrayIterator, SafeWeakMap, @@ -211,7 +211,7 @@ function normalizeAlgorithm(algorithm, op) { // 5. 
let desiredType = undefined; for (const key in registeredAlgorithms) { - if (!ObjectPrototypeHasOwnProperty(registeredAlgorithms, key)) { + if (!ObjectHasOwn(registeredAlgorithms, key)) { continue; } if ( @@ -246,7 +246,7 @@ function normalizeAlgorithm(algorithm, op) { const dict = simpleAlgorithmDictionaries[desiredType]; // 10. for (const member in dict) { - if (!ObjectPrototypeHasOwnProperty(dict, member)) { + if (!ObjectHasOwn(dict, member)) { continue; } const idlType = dict[member]; diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 7ec6751fae..89b9e1a2bc 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -28,8 +28,8 @@ const { ArrayPrototypeJoin, ArrayPrototypeSplice, ArrayPrototypeFilter, - ObjectPrototypeHasOwnProperty, ObjectEntries, + ObjectHasOwn, RegExpPrototypeTest, SafeArrayIterator, SafeRegExp, @@ -79,7 +79,7 @@ function fillHeaders(headers, object) { } } else { for (const key in object) { - if (!ObjectPrototypeHasOwnProperty(object, key)) { + if (!ObjectHasOwn(object, key)) { continue; } appendHeader(headers, key, object[key]); diff --git a/ext/ffi/00_ffi.js b/ext/ffi/00_ffi.js index f36690226e..2091a55b38 100644 --- a/ext/ffi/00_ffi.js +++ b/ext/ffi/00_ffi.js @@ -10,7 +10,7 @@ const { ArrayPrototypeJoin, DataViewPrototypeGetByteLength, ObjectDefineProperty, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, Number, NumberIsSafeInteger, @@ -439,7 +439,7 @@ class DynamicLibrary { constructor(path, symbols) { ({ 0: this.#rid, 1: this.symbols } = ops.op_ffi_load({ path, symbols })); for (const symbol in symbols) { - if (!ObjectPrototypeHasOwnProperty(symbols, symbol)) { + if (!ObjectHasOwn(symbols, symbol)) { continue; } diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index ce7312ee85..c73701ba80 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -16,7 +16,7 @@ const { ArrayPrototypeSplice, 
ObjectGetOwnPropertyDescriptor, ObjectGetPrototypeOf, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectSetPrototypeOf, ObjectKeys, ObjectEntries, @@ -433,7 +433,7 @@ const CircularRequirePrototypeWarningProxy = new Proxy({}, { getOwnPropertyDescriptor(target, prop) { if ( - ObjectPrototypeHasOwnProperty(target, prop) || prop === "__esModule" + ObjectHasOwn(target, prop) || prop === "__esModule" ) { return ObjectGetOwnPropertyDescriptor(target, prop); } diff --git a/ext/node/polyfills/internal/child_process.ts b/ext/node/polyfills/internal/child_process.ts index 7c72cb0ca3..edc4caa5e5 100644 --- a/ext/node/polyfills/internal/child_process.ts +++ b/ext/node/polyfills/internal/child_process.ts @@ -34,7 +34,7 @@ import { ArrayPrototypeSlice, ArrayPrototypeSort, ArrayPrototypeUnshift, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, StringPrototypeToUpperCase, } from "ext:deno_node/internal/primordials.mjs"; import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; @@ -429,7 +429,7 @@ function copyProcessEnvToEnv( if ( Deno.env.get(name) && (!optionEnv || - !ObjectPrototypeHasOwnProperty(optionEnv, name)) + !ObjectHasOwn(optionEnv, name)) ) { env[name] = Deno.env.get(name); } diff --git a/ext/node/polyfills/internal/primordials.mjs b/ext/node/polyfills/internal/primordials.mjs index 1639efdb50..8127eebace 100644 --- a/ext/node/polyfills/internal/primordials.mjs +++ b/ext/node/polyfills/internal/primordials.mjs @@ -12,7 +12,7 @@ export const ArrayPrototypeSort = (that, ...args) => that.sort(...args); export const ArrayPrototypeUnshift = (that, ...args) => that.unshift(...args); export const ObjectAssign = Object.assign; export const ObjectCreate = Object.create; -export const ObjectPrototypeHasOwnProperty = Object.hasOwn; +export const ObjectHasOwn = Object.hasOwn; export const RegExpPrototypeTest = (that, ...args) => that.test(...args); export const RegExpPrototypeExec = RegExp.prototype.exec; export const StringFromCharCode = String.fromCharCode; diff --git 
a/ext/webidl/00_webidl.js b/ext/webidl/00_webidl.js index 71b7982b75..247ebfe0d2 100644 --- a/ext/webidl/00_webidl.js +++ b/ext/webidl/00_webidl.js @@ -47,7 +47,7 @@ const { ObjectGetOwnPropertyDescriptor, ObjectGetOwnPropertyDescriptors, ObjectGetPrototypeOf, - ObjectPrototypeHasOwnProperty, + ObjectHasOwn, ObjectPrototypeIsPrototypeOf, ObjectIs, PromisePrototypeThen, @@ -920,7 +920,7 @@ function createRecordConverter(keyConverter, valueConverter) { // Fast path for common case (not a Proxy) if (!core.isProxy(V)) { for (const key in V) { - if (!ObjectPrototypeHasOwnProperty(V, key)) { + if (!ObjectHasOwn(V, key)) { continue; } const typedKey = keyConverter(key, prefix, context, opts); @@ -1133,7 +1133,7 @@ function mixinPairIterable(name, prototype, dataSymbol, keyKey, valueKey) { function configurePrototype(prototype) { const descriptors = ObjectGetOwnPropertyDescriptors(prototype.prototype); for (const key in descriptors) { - if (!ObjectPrototypeHasOwnProperty(descriptors, key)) { + if (!ObjectHasOwn(descriptors, key)) { continue; } if (key === "constructor") continue; From 97147faf891dce8d77bc829cbd0ebc1afebb2575 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 2 May 2023 13:30:11 +0200 Subject: [PATCH 106/320] chore: release extension crates, unpin tokio (#18954) --- Cargo.lock | 50 +++++++++++++++--------------- Cargo.toml | 52 ++++++++++++++++---------------- bench_util/Cargo.toml | 2 +- cli/napi/sym/Cargo.toml | 2 +- core/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 
+- ext/webstorage/Cargo.toml | 2 +- ops/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- serde_v8/Cargo.toml | 2 +- 27 files changed, 76 insertions(+), 76 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 99a51ffc47..77b6c0cbb7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -818,7 +818,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.95.0" +version = "0.96.0" dependencies = [ "bencher", "deno_core", @@ -828,7 +828,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.95.0" +version = "0.96.0" dependencies = [ "async-trait", "deno_core", @@ -838,7 +838,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.33.0" +version = "0.34.0" dependencies = [ "async-trait", "deno_core", @@ -850,14 +850,14 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.101.0" +version = "0.102.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.183.0" +version = "0.184.0" dependencies = [ "anyhow", "bytes", @@ -882,7 +882,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.115.0" +version = "0.116.0" dependencies = [ "aes", "aes-gcm", @@ -950,7 +950,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.125.0" +version = "0.126.0" dependencies = [ "bytes", "data-url", @@ -967,7 +967,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.88.0" +version = "0.89.0" dependencies = [ "deno_core", "dlopen", @@ -982,7 +982,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.11.0" +version = "0.12.0" dependencies = [ "async-trait", "deno_core", @@ -1022,7 +1022,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.96.0" +version = "0.97.0" dependencies = [ "async-compression", "base64 0.13.1", @@ -1055,7 +1055,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.11.0" +version = "0.12.0" dependencies = [ "deno_core", "nix", @@ -1066,7 +1066,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.9.0" 
+version = "0.10.0" dependencies = [ "anyhow", "async-trait", @@ -1120,7 +1120,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.31.0" +version = "0.32.0" dependencies = [ "deno_core", "libloading", @@ -1128,7 +1128,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.93.0" +version = "0.94.0" dependencies = [ "deno_core", "deno_tls", @@ -1143,7 +1143,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.38.0" +version = "0.39.0" dependencies = [ "aes", "cbc", @@ -1212,7 +1212,7 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.61.0" +version = "0.62.0" dependencies = [ "lazy-regex", "once_cell", @@ -1230,7 +1230,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.109.0" +version = "0.110.0" dependencies = [ "atty", "console_static_text", @@ -1311,7 +1311,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.88.0" +version = "0.89.0" dependencies = [ "deno_core", "once_cell", @@ -1325,7 +1325,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.101.0" +version = "0.102.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1337,7 +1337,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.132.0" +version = "0.133.0" dependencies = [ "async-trait", "base64-simd", @@ -1355,7 +1355,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.101.0" +version = "0.102.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1363,7 +1363,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.106.0" +version = "0.107.0" dependencies = [ "bytes", "deno_core", @@ -1379,7 +1379,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.96.0" +version = "0.97.0" dependencies = [ "deno_core", "deno_web", @@ -2981,7 +2981,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.31.0" +version = "0.32.0" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", @@ -4250,7 +4250,7 @@ dependencies = [ [[package]] name = 
"serde_v8" -version = "0.94.0" +version = "0.95.0" dependencies = [ "bencher", "bytes", diff --git a/Cargo.toml b/Cargo.toml index f989fceb04..be03237a22 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,12 +44,12 @@ repository = "https://github.com/denoland/deno" v8 = { version = "0.71.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } -deno_core = { version = "0.183.0", path = "./core" } -deno_ops = { version = "0.61.0", path = "./ops" } -serde_v8 = { version = "0.94.0", path = "./serde_v8" } -deno_runtime = { version = "0.109.0", path = "./runtime" } -napi_sym = { version = "0.31.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.95.0", path = "./bench_util" } +deno_core = { version = "0.184.0", path = "./core" } +deno_ops = { version = "0.62.0", path = "./ops" } +serde_v8 = { version = "0.95.0", path = "./serde_v8" } +deno_runtime = { version = "0.110.0", path = "./runtime" } +napi_sym = { version = "0.32.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.96.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.13.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } @@ -57,25 +57,25 @@ deno_npm = "0.3.0" deno_semver = "0.2.1" # exts -deno_broadcast_channel = { version = "0.95.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.33.0", path = "./ext/cache" } -deno_console = { version = "0.101.0", path = "./ext/console" } -deno_crypto = { version = "0.115.0", path = "./ext/crypto" } -deno_fetch = { version = "0.125.0", path = "./ext/fetch" } -deno_ffi = { version = "0.88.0", path = "./ext/ffi" } -deno_fs = { version = "0.11.0", path = "./ext/fs" } -deno_http = { version = "0.96.0", path = "./ext/http" } -deno_io = { version = "0.11.0", path = "./ext/io" } -deno_net = { version = "0.93.0", path = "./ext/net" } -deno_node = { version = "0.38.0", path = "./ext/node" } -deno_kv = { version = "0.9.0", path = "./ext/kv" } -deno_tls = { 
version = "0.88.0", path = "./ext/tls" } -deno_url = { version = "0.101.0", path = "./ext/url" } -deno_web = { version = "0.132.0", path = "./ext/web" } -deno_webidl = { version = "0.101.0", path = "./ext/webidl" } -deno_websocket = { version = "0.106.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.96.0", path = "./ext/webstorage" } -deno_napi = { version = "0.31.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.96.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.34.0", path = "./ext/cache" } +deno_console = { version = "0.102.0", path = "./ext/console" } +deno_crypto = { version = "0.116.0", path = "./ext/crypto" } +deno_fetch = { version = "0.126.0", path = "./ext/fetch" } +deno_ffi = { version = "0.89.0", path = "./ext/ffi" } +deno_fs = { version = "0.12.0", path = "./ext/fs" } +deno_http = { version = "0.97.0", path = "./ext/http" } +deno_io = { version = "0.12.0", path = "./ext/io" } +deno_net = { version = "0.94.0", path = "./ext/net" } +deno_node = { version = "0.39.0", path = "./ext/node" } +deno_kv = { version = "0.10.0", path = "./ext/kv" } +deno_tls = { version = "0.89.0", path = "./ext/tls" } +deno_url = { version = "0.102.0", path = "./ext/url" } +deno_web = { version = "0.133.0", path = "./ext/web" } +deno_webidl = { version = "0.102.0", path = "./ext/webidl" } +deno_websocket = { version = "0.107.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.97.0", path = "./ext/webstorage" } +deno_napi = { version = "0.32.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" @@ -132,7 +132,7 @@ socket2 = "0.4.7" tar = "=0.4.38" tempfile = "3.4.0" thiserror = "=1.0.38" -tokio = { version = "=1.25.0", features = ["full"] } +tokio = { version = "1.25.0", features = ["full"] } tokio-rustls = "0.23.3" tokio-util = "0.7.4" tower-lsp = { version = "=0.17.0", features = ["proposed"] } diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index bc9c02cffa..4595c3d8ad 100644 --- 
a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.95.0" +version = "0.96.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index e722892dc3..2ecdbf2b45 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.31.0" +version = "0.32.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/core/Cargo.toml b/core/Cargo.toml index 1bb6f520da..141583710b 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.183.0" +version = "0.184.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 0bb15a19a0..dcff0ade30 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.95.0" +version = "0.96.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 257b792a18..ddbec38346 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.33.0" +version = "0.34.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index aff3557f17..c38b537f4b 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.101.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index eccd114aaa..d6642733ba 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] 
name = "deno_crypto" -version = "0.115.0" +version = "0.116.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index 063ec23b6a..b46159b7ed 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.125.0" +version = "0.126.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 12100326b8..291935e1ca 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.88.0" +version = "0.89.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 655d479602..5675a483c3 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.11.0" +version = "0.12.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index ea8a6eef78..73f9f6ef28 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.96.0" +version = "0.97.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 8562731814..e11dc833c4 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.11.0" +version = "0.12.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 3bead8be17..3a060f9c4f 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.9.0" +version = "0.10.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 
b2e6a999f4..331876615e 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.31.0" +version = "0.32.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index 7948cd487f..5d185e2d2d 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.93.0" +version = "0.94.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 21716d7ca2..23d4ba1723 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.38.0" +version = "0.39.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 9ca0091500..13808a22a0 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.88.0" +version = "0.89.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index da4d37c003..43c6748e66 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.101.0" +version = "0.102.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index 20cc2e5286..6f1185b5b3 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.132.0" +version = "0.133.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 2ae4948e74..754db9ee18 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.101.0" +version = "0.102.0" authors.workspace 
= true edition.workspace = true license.workspace = true diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 436c64c6bd..0cc549dad9 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.106.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 52d12a9ccd..10561b20ad 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.96.0" +version = "0.97.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index 05d4c2793d..2d091b2ee3 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.61.0" +version = "0.62.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index cc2494ce08..88813e4872 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.109.0" +version = "0.110.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index ef94b1f284..ad384347dc 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true From 022aae9854bed6219d75eeb82fcf46652c21050d Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 2 May 2023 20:17:11 +0530 Subject: [PATCH 107/320] perf(core): use jemalloc for V8 array buffer allocator (#18875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commits changes "deno_core" to use jemalloc allocator as an allocator for V8 
array buffers. This greatly improves our GC characteristics as we are using a lot of short lived array buffers. They no longer go through the expensive malloc/free cycle using the default Rust allocator, but instead use jemallocator's memory pool. As a result the flamegraphs for WS/HTTP server flamegraphs no longer show stacks for malloc/free around ops that use ZeroCopyBuf and &[u8]. --------- Co-authored-by: Bartek Iwańczuk --- Cargo.lock | 11 +++++++++ core/Cargo.toml | 3 +++ core/runtime.rs | 64 +++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 78 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 77b6c0cbb7..f4647350b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -875,6 +875,7 @@ dependencies = [ "serde_v8", "smallvec", "sourcemap", + "tikv-jemalloc-sys", "tokio", "url", "v8", @@ -5130,6 +5131,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "tikv-jemalloc-sys" +version = "0.5.3+5.3.0-patched" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a678df20055b43e57ef8cddde41cdfda9a3c1a060b67f4c5836dfb1d78543ba8" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "time" version = "0.3.20" diff --git a/core/Cargo.toml b/core/Cargo.toml index 141583710b..0e0b1d2c7b 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -39,6 +39,9 @@ sourcemap = "6.1" url.workspace = true v8.workspace = true +[target.'cfg(not(target_env = "msvc"))'.dependencies] +tikv-jemalloc-sys = "0.5" + [[example]] name = "http_bench_json_ops" path = "examples/http_bench_json_ops/main.rs" diff --git a/core/runtime.rs b/core/runtime.rs index 46256b8d8e..5470a89b9b 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -72,6 +72,48 @@ struct IsolateAllocations { Option<(Box>, v8::NearHeapLimitCallback)>, } +/// A custom allocator for array buffers for V8. It uses `jemalloc` so it's +/// not available on Windows. 
+#[cfg(not(target_env = "msvc"))] +mod custom_allocator { + use std::ffi::c_void; + + pub struct RustAllocator; + + pub unsafe extern "C" fn allocate( + _alloc: &RustAllocator, + n: usize, + ) -> *mut c_void { + tikv_jemalloc_sys::calloc(1, n) + } + + pub unsafe extern "C" fn allocate_uninitialized( + _alloc: &RustAllocator, + n: usize, + ) -> *mut c_void { + tikv_jemalloc_sys::malloc(n) + } + + pub unsafe extern "C" fn free( + _alloc: &RustAllocator, + data: *mut c_void, + _n: usize, + ) { + tikv_jemalloc_sys::free(data) + } + + pub unsafe extern "C" fn reallocate( + _alloc: &RustAllocator, + prev: *mut c_void, + _oldlen: usize, + newlen: usize, + ) -> *mut c_void { + tikv_jemalloc_sys::realloc(prev, newlen) + } + + pub unsafe extern "C" fn drop(_alloc: *const RustAllocator) {} +} + /// A single execution context of JavaScript. Corresponds roughly to the "Web /// Worker" concept in the DOM. A JsRuntime is a Future that can be used with /// an event loop (Tokio, async_std). @@ -393,6 +435,20 @@ impl JsRuntime { } (isolate, snapshot_options) } else { + #[cfg(not(target_env = "msvc"))] + let vtable: &'static v8::RustAllocatorVtable< + custom_allocator::RustAllocator, + > = &v8::RustAllocatorVtable { + allocate: custom_allocator::allocate, + allocate_uninitialized: custom_allocator::allocate_uninitialized, + free: custom_allocator::free, + reallocate: custom_allocator::reallocate, + drop: custom_allocator::drop, + }; + #[cfg(not(target_env = "msvc"))] + let allocator = Arc::new(custom_allocator::RustAllocator); + + #[allow(unused_mut)] let mut params = options .create_params .take() @@ -404,6 +460,14 @@ impl JsRuntime { }) .external_references(&**refs); + #[cfg(not(target_env = "msvc"))] + // SAFETY: We are leaking the created `allocator` variable so we're sure + // it will outlive the created isolate. We also made sure that the vtable + // is correct. 
+ let mut params = params.array_buffer_allocator(unsafe { + v8::new_rust_allocator(Arc::into_raw(allocator), vtable) + }); + if let Some(snapshot) = options.startup_snapshot { params = match snapshot { Snapshot::Static(data) => params.snapshot_blob(data), From 341fc11e2443e7075d96ef8c73ff15e36d2d60a2 Mon Sep 17 00:00:00 2001 From: Yarden Shoham Date: Tue, 2 May 2023 18:51:50 +0300 Subject: [PATCH 108/320] docs(lib): document `Deno.Command` requires the `allow-run` permission (#18958) --- cli/tsc/dts/lib.deno.ns.d.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 90a12ad420..408580737e 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -4047,6 +4047,7 @@ declare namespace Deno { * console.assert("world\n" === new TextDecoder().decode(stderr)); * ``` * + * @tags allow-run * @category Sub Process */ export class Command { From adcda4fa640939682076e793f12a5b22e3de1f50 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Tue, 2 May 2023 17:55:10 -0400 Subject: [PATCH 109/320] refactor(ext/io): move tty metadata to separate collection (#18959) This removes the tty stuff that's hanging on the file resources and instead stores them in a separate `TtyModeStore`. Although this will cause the tty store items to not be removed when the resource is removed, I think this is ok to do because there will be a small number of resources this is every done with and usually those resources won't ever be closed. 
--- ext/io/lib.rs | 84 ++++++---------------------------- runtime/ops/tty.rs | 111 ++++++++++++++++++++++++++++----------------- 2 files changed, 83 insertions(+), 112 deletions(-) diff --git a/ext/io/lib.rs b/ext/io/lib.rs index 23c087e162..73ce725780 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -3,7 +3,6 @@ use deno_core::error::resource_unavailable; use deno_core::error::AnyError; use deno_core::op; -use deno_core::parking_lot::Mutex; use deno_core::AsyncMutFuture; use deno_core::AsyncRefCell; use deno_core::AsyncResult; @@ -25,7 +24,6 @@ use std::io::ErrorKind; use std::io::Read; use std::io::Write; use std::rc::Rc; -use std::sync::Arc; use tokio::io::AsyncRead; use tokio::io::AsyncReadExt; use tokio::io::AsyncWrite; @@ -159,20 +157,6 @@ pub struct Stdio { pub stderr: StdioPipe, } -#[cfg(unix)] -use nix::sys::termios; - -#[derive(Default)] -pub struct TtyMetadata { - #[cfg(unix)] - pub mode: Option, -} - -#[derive(Default)] -pub struct FileMetadata { - pub tty: TtyMetadata, -} - #[derive(Debug)] pub struct WriteOnlyResource { stream: AsyncRefCell, @@ -405,26 +389,12 @@ impl Read for StdFileResourceInner { } } -struct StdFileResourceCellValue { - inner: StdFileResourceInner, - meta_data: Arc>, -} - -impl StdFileResourceCellValue { - pub fn try_clone(&self) -> Result { - Ok(Self { - inner: self.inner.try_clone()?, - meta_data: self.meta_data.clone(), - }) - } -} - pub struct StdFileResource { name: String, // We can't use an AsyncRefCell here because we need to allow // access to the resource synchronously at any time and // asynchronously one at a time in order - cell: RefCell>, + cell: RefCell>, // Used to keep async actions in order and only allow one // to occur at a time cell_async_task_queue: TaskQueue, @@ -433,10 +403,7 @@ pub struct StdFileResource { impl StdFileResource { fn stdio(inner: StdFileResourceInner, name: &str) -> Self { Self { - cell: RefCell::new(Some(StdFileResourceCellValue { - inner, - meta_data: Default::default(), - })), + cell: 
RefCell::new(Some(inner)), cell_async_task_queue: Default::default(), name: name.to_string(), } @@ -444,26 +411,20 @@ impl StdFileResource { pub fn fs_file(fs_file: StdFile) -> Self { Self { - cell: RefCell::new(Some(StdFileResourceCellValue { - inner: StdFileResourceInner::file(fs_file), - meta_data: Default::default(), - })), + cell: RefCell::new(Some(StdFileResourceInner::file(fs_file))), cell_async_task_queue: Default::default(), name: "fsFile".to_string(), } } - fn with_inner_and_metadata( + fn with_inner( &self, - action: impl FnOnce( - &mut StdFileResourceInner, - &Arc>, - ) -> Result, + action: impl FnOnce(&mut StdFileResourceInner) -> Result, ) -> Option> { match self.cell.try_borrow_mut() { Ok(mut cell) if cell.is_some() => { let mut file = cell.take().unwrap(); - let result = action(&mut file.inner, &file.meta_data); + let result = action(&mut file); cell.replace(file); Some(result) } @@ -491,7 +452,7 @@ impl StdFileResource { } }; let (cell_value, result) = tokio::task::spawn_blocking(move || { - let result = action(&mut cell_value.inner); + let result = action(&mut cell_value); (cell_value, result) }) .await @@ -539,14 +500,14 @@ impl StdFileResource { fn read_byob_sync(self: Rc, buf: &mut [u8]) -> Result { self - .with_inner_and_metadata(|inner, _| inner.read(buf)) + .with_inner(|inner| inner.read(buf)) .ok_or_else(resource_unavailable)? .map_err(Into::into) } fn write_sync(self: Rc, data: &[u8]) -> Result { self - .with_inner_and_metadata(|inner, _| inner.write_and_maybe_flush(data)) + .with_inner(|inner| inner.write_and_maybe_flush(data)) .ok_or_else(resource_unavailable)? } @@ -572,7 +533,7 @@ impl StdFileResource { { Self::with_resource(state, rid, move |resource| { resource - .with_inner_and_metadata(move |inner, _| inner.with_file(f)) + .with_inner(move |inner| inner.with_file(f)) .ok_or_else(resource_unavailable)? 
}) } @@ -581,24 +542,7 @@ impl StdFileResource { where F: FnOnce(&mut StdFile) -> Result, { - self.with_inner_and_metadata(move |inner, _| inner.with_file(f)) - } - - pub fn with_file_and_metadata( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(&mut StdFile, &Arc>) -> Result, - { - Self::with_resource(state, rid, move |resource| { - resource - .with_inner_and_metadata(move |inner, metadata| { - inner.with_file(move |file| f(file, metadata)) - }) - .ok_or_else(resource_unavailable)? - }) + self.with_inner(move |inner| inner.with_file(f)) } pub async fn with_file_blocking_task( @@ -646,7 +590,7 @@ impl StdFileResource { ) -> Result { Self::with_resource(state, rid, |resource| { resource - .with_inner_and_metadata(|inner, _| match inner.kind { + .with_inner(|inner| match inner.kind { StdFileResourceKind::File => { let file = inner.file.try_clone()?; Ok(file.into()) @@ -712,7 +656,7 @@ impl Resource for StdFileResource { fn backing_fd(self: Rc) -> Option { use std::os::unix::io::AsRawFd; self - .with_inner_and_metadata(move |std_file, _| { + .with_inner(move |std_file| { Ok::<_, ()>(std_file.with_file(|f| f.as_raw_fd())) })? 
.ok() @@ -729,7 +673,7 @@ pub fn op_print( let rid = if is_err { 2 } else { 1 }; StdFileResource::with_resource(state, rid, move |resource| { resource - .with_inner_and_metadata(|inner, _| { + .with_inner(|inner| { inner.write_all_and_maybe_flush(msg.as_bytes())?; Ok(()) }) diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index 3146f22e22..a3dc03a6fa 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -6,8 +6,35 @@ use deno_core::OpState; use deno_io::StdFileResource; use std::io::Error; +#[cfg(unix)] +use deno_core::ResourceId; #[cfg(unix)] use nix::sys::termios; +#[cfg(unix)] +use std::cell::RefCell; +#[cfg(unix)] +use std::collections::HashMap; +#[cfg(unix)] +use std::rc::Rc; + +#[cfg(unix)] +#[derive(Default, Clone)] +struct TtyModeStore(Rc>>); + +#[cfg(unix)] +impl TtyModeStore { + pub fn get(&self, id: ResourceId) -> Option { + self.0.borrow().get(&id).map(ToOwned::to_owned) + } + + pub fn take(&self, id: ResourceId) -> Option { + self.0.borrow_mut().remove(&id) + } + + pub fn set(&self, id: ResourceId, mode: termios::Termios) { + self.0.borrow_mut().insert(id, mode); + } +} #[cfg(windows)] use deno_core::error::custom_error; @@ -35,6 +62,10 @@ fn get_windows_handle( deno_core::extension!( deno_tty, ops = [op_stdin_set_raw, op_isatty, op_console_size], + state = |state| { + #[cfg(unix)] + state.put(TtyModeStore::default()); + }, customizer = |ext: &mut deno_core::ExtensionBuilder| { ext.force_op_registration(); }, @@ -118,53 +149,49 @@ fn op_stdin_set_raw( { use std::os::unix::io::AsRawFd; - StdFileResource::with_file_and_metadata( - state, - rid, - move |std_file, meta_data| { - let raw_fd = std_file.as_raw_fd(); + let tty_mode_store = state.borrow::().clone(); + let previous_mode = tty_mode_store.get(rid); - if is_raw { - let mut raw = { - let mut meta_data = meta_data.lock(); - let maybe_tty_mode = &mut meta_data.tty.mode; - if maybe_tty_mode.is_none() { - // Save original mode. 
- let original_mode = termios::tcgetattr(raw_fd)?; - maybe_tty_mode.replace(original_mode); - } - maybe_tty_mode.clone().unwrap() - }; + StdFileResource::with_file(state, rid, move |std_file| { + let raw_fd = std_file.as_raw_fd(); - raw.input_flags &= !(termios::InputFlags::BRKINT - | termios::InputFlags::ICRNL - | termios::InputFlags::INPCK - | termios::InputFlags::ISTRIP - | termios::InputFlags::IXON); - - raw.control_flags |= termios::ControlFlags::CS8; - - raw.local_flags &= !(termios::LocalFlags::ECHO - | termios::LocalFlags::ICANON - | termios::LocalFlags::IEXTEN); - if !cbreak { - raw.local_flags &= !(termios::LocalFlags::ISIG); - } - raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = - 1; - raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = - 0; - termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?; - } else { - // Try restore saved mode. - if let Some(mode) = meta_data.lock().tty.mode.take() { - termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?; + if is_raw { + let mut raw = match previous_mode { + Some(mode) => mode, + None => { + // Save original mode. + let original_mode = termios::tcgetattr(raw_fd)?; + tty_mode_store.set(rid, original_mode.clone()); + original_mode } + }; + + raw.input_flags &= !(termios::InputFlags::BRKINT + | termios::InputFlags::ICRNL + | termios::InputFlags::INPCK + | termios::InputFlags::ISTRIP + | termios::InputFlags::IXON); + + raw.control_flags |= termios::ControlFlags::CS8; + + raw.local_flags &= !(termios::LocalFlags::ECHO + | termios::LocalFlags::ICANON + | termios::LocalFlags::IEXTEN); + if !cbreak { + raw.local_flags &= !(termios::LocalFlags::ISIG); } + raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = 1; + raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = 0; + termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?; + } else { + // Try restore saved mode. 
+ if let Some(mode) = tty_mode_store.take(rid) { + termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?; + } + } - Ok(()) - }, - ) + Ok(()) + }) } } From 798c1ad0f1de80ff0e7196b6140a3f74e31fe111 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 3 May 2023 00:36:33 +0200 Subject: [PATCH 110/320] perf: use jemalloc as global allocator (#18957) Follow up to https://github.com/denoland/deno/pull/18875 that enables `jemalloc` as a global allocator for the Deno CLI. --- Cargo.lock | 11 +++++++++++ Cargo.toml | 2 ++ cli/Cargo.toml | 3 +++ cli/main.rs | 7 +++++++ cli/napi/async.rs | 3 ++- core/Cargo.toml | 2 +- test_napi/src/async.rs | 10 +++++++--- test_napi/typedarray_test.js | 15 +++++++++------ 8 files changed, 42 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f4647350b0..8c4f0f6ac5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -770,6 +770,7 @@ dependencies = [ "text-size", "text_lines", "thiserror", + "tikv-jemallocator", "tokio", "tokio-util", "tower-lsp", @@ -5141,6 +5142,16 @@ dependencies = [ "libc", ] +[[package]] +name = "tikv-jemallocator" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979" +dependencies = [ + "libc", + "tikv-jemalloc-sys", +] + [[package]] name = "time" version = "0.3.20" diff --git a/Cargo.toml b/Cargo.toml index be03237a22..911f86bd2a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -133,6 +133,8 @@ tar = "=0.4.38" tempfile = "3.4.0" thiserror = "=1.0.38" tokio = { version = "1.25.0", features = ["full"] } +tikv-jemallocator = "0.5.0" +tikv-jemalloc-sys = "0.5.3" tokio-rustls = "0.23.3" tokio-util = "0.7.4" tower-lsp = { version = "=0.17.0", features = ["proposed"] } diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 319c8cb56c..63842a6a3f 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -120,6 +120,9 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", 
"objbase", " [target.'cfg(unix)'.dependencies] nix.workspace = true +[target.'cfg(not(target_env = "msvc"))'.dependencies] +tikv-jemallocator.workspace = true + [dev-dependencies] deno_bench_util.workspace = true dotenv = "=0.15.0" diff --git a/cli/main.rs b/cli/main.rs index 85942cbd82..c3421b0cd9 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -26,6 +26,13 @@ mod version; mod watcher; mod worker; +#[cfg(not(target_env = "msvc"))] +use tikv_jemallocator::Jemalloc; + +#[cfg(not(target_env = "msvc"))] +#[global_allocator] +static GLOBAL: Jemalloc = Jemalloc; + use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use crate::args::Flags; diff --git a/cli/napi/async.rs b/cli/napi/async.rs index 8cbdb22204..e6695551a7 100644 --- a/cli/napi/async.rs +++ b/cli/napi/async.rs @@ -24,7 +24,8 @@ fn napi_create_async_work( execute, complete, }; - *result = transmute::, _>(Box::new(work)); + let work_box = Box::new(work); + *result = transmute::<*mut AsyncWork, _>(Box::into_raw(work_box)); Ok(()) } diff --git a/core/Cargo.toml b/core/Cargo.toml index 0e0b1d2c7b..e2ffca6579 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -40,7 +40,7 @@ url.workspace = true v8.workspace = true [target.'cfg(not(target_env = "msvc"))'.dependencies] -tikv-jemalloc-sys = "0.5" +tikv-jemalloc-sys.workspace = true [[example]] name = "http_bench_json_ops" diff --git a/test_napi/src/async.rs b/test_napi/src/async.rs index 51e6edac9e..970d34ce19 100644 --- a/test_napi/src/async.rs +++ b/test_napi/src/async.rs @@ -49,7 +49,6 @@ unsafe extern "C" fn complete( ptr::null(), &mut _result )); - assert_napi_ok!(napi_delete_reference(env, baton.func)); assert_napi_ok!(napi_delete_async_work(env, baton.task)); } @@ -73,7 +72,7 @@ extern "C" fn test_async_work( &mut resource_name, )); - let mut async_work: napi_async_work = ptr::null_mut(); + let async_work: napi_async_work = ptr::null_mut(); let mut func: napi_ref = ptr::null_mut(); assert_napi_ok!(napi_create_reference(env, args[0], 1, &mut 
func)); @@ -82,6 +81,8 @@ extern "C" fn test_async_work( func, task: async_work, }); + let mut async_work = baton.task; + let baton_ptr = Box::into_raw(baton) as *mut c_void; assert_napi_ok!(napi_create_async_work( env, @@ -89,9 +90,12 @@ extern "C" fn test_async_work( resource_name, Some(execute), Some(complete), - Box::into_raw(baton) as *mut c_void, + baton_ptr, &mut async_work, )); + let mut baton = unsafe { Box::from_raw(baton_ptr as *mut Baton) }; + baton.task = async_work; + Box::into_raw(baton); assert_napi_ok!(napi_queue_async_work(env, async_work)); ptr::null_mut() diff --git a/test_napi/typedarray_test.js b/test_napi/typedarray_test.js index f9b3466264..7a60a3ab4b 100644 --- a/test_napi/typedarray_test.js +++ b/test_napi/typedarray_test.js @@ -28,9 +28,12 @@ Deno.test("napi typedarray float64", function () { assertEquals(Math.round(10 * doubleResult[2]) / 10, -6.6); }); -Deno.test("napi typedarray external", function () { - assertEquals( - new Uint8Array(typedarray.test_external()), - new Uint8Array([0, 1, 2, 3]), - ); -}); +// TODO(bartlomieju): this test causes segfaults when used with jemalloc. +// Node documentation provides a hint that this function is not supported by +// other runtime like electron. +// Deno.test("napi typedarray external", function () { +// assertEquals( +// new Uint8Array(typedarray.test_external()), +// new Uint8Array([0, 1, 2, 3]), +// ); +// }); From 93a78d3d4aedb96cfc2641048532c95197f661bb Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Wed, 3 May 2023 12:44:00 +0100 Subject: [PATCH 111/320] fix(ext/kv): KvU64#valueOf and KvU64 inspect (#18656) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `new Deno.KvU64(1n) + 2n == 3n` is now true. `new Deno.KvU64(1n)` is now inspected as `[Deno.KvU64: 1n]` (`Object(1n)` is inspected as `[BigInt: 1n]`). 
--------- Co-authored-by: Bartek Iwańczuk --- cli/tests/unit/kv_test.ts | 28 +++++++++++++++++------- ext/kv/01_db.ts | 45 +++++++++++++++++++++++++++++++-------- ext/kv/lib.rs | 3 +-- 3 files changed, 57 insertions(+), 19 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 5a202fb0be..3a3476857a 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -578,19 +578,31 @@ Deno.test("KvU64 underflow", () => { }, RangeError); }); -Deno.test("KvU64 frozen", () => { - const a = new Deno.KvU64(1n); - assertThrows(() => { - // @ts-expect-error value is readonly - a.value = 2n; - }, TypeError); -}); - Deno.test("KvU64 unbox", () => { const a = new Deno.KvU64(1n); assertEquals(a.value, 1n); }); +Deno.test("KvU64 unbox with valueOf", () => { + const a = new Deno.KvU64(1n); + assertEquals(a.valueOf(), 1n); +}); + +Deno.test("KvU64 auto-unbox", () => { + const a = new Deno.KvU64(1n); + assertEquals(a as unknown as bigint + 1n, 2n); +}); + +Deno.test("KvU64 toString", () => { + const a = new Deno.KvU64(1n); + assertEquals(a.toString(), "1"); +}); + +Deno.test("KvU64 inspect", () => { + const a = new Deno.KvU64(1n); + assertEquals(Deno.inspect(a), "[Deno.KvU64: 1n]"); +}); + async function collect( iter: Deno.KvListIterator, ): Promise[]> { diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index 72d3580051..ca37aa8401 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -2,8 +2,15 @@ // @ts-ignore internal api const { - ObjectGetPrototypeOf, AsyncGeneratorPrototype, + BigIntPrototypeToString, + ObjectFreeze, + ObjectGetPrototypeOf, + ObjectPrototypeIsPrototypeOf, + StringPrototypeReplace, + SymbolFor, + SymbolToStringTag, + Uint8ArrayPrototype, } = globalThis.__bootstrap.primordials; const core = Deno.core; const ops = core.ops; @@ -289,7 +296,7 @@ const MIN_U64 = BigInt("0"); const MAX_U64 = BigInt("0xffffffffffffffff"); class KvU64 { - readonly value: bigint; + value: bigint; constructor(value: bigint) { if (typeof value !== 
"bigint") { @@ -299,11 +306,31 @@ class KvU64 { throw new RangeError("value must be a positive bigint"); } if (value > MAX_U64) { - throw new RangeError("value must be a 64-bit unsigned integer"); + throw new RangeError("value must fit in a 64-bit unsigned integer"); } this.value = value; Object.freeze(this); } + + valueOf() { + return this.value; + } + + toString() { + return BigIntPrototypeToString(this.value); + } + + get [SymbolToStringTag]() { + return "Deno.KvU64"; + } + + [SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) { + return StringPrototypeReplace( + inspect(Object(this.value), inspectOptions), + "BigInt", + "Deno.KvU64", + ); + } } function deserializeValue(entry: RawKvEntry): Deno.KvEntry { @@ -330,15 +357,15 @@ function deserializeValue(entry: RawKvEntry): Deno.KvEntry { } function serializeValue(value: unknown): RawValue { - if (value instanceof Uint8Array) { + if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, value)) { return { kind: "bytes", value, }; - } else if (value instanceof KvU64) { + } else if (ObjectPrototypeIsPrototypeOf(KvU64.prototype, value)) { return { kind: "u64", - value: value.value, + value: value.valueOf(), }; } else { return { @@ -398,13 +425,13 @@ class KvListIterator extends AsyncIterator let start: Deno.KvKey | undefined; let end: Deno.KvKey | undefined; if ("prefix" in selector && selector.prefix !== undefined) { - prefix = Object.freeze([...selector.prefix]); + prefix = ObjectFreeze([...selector.prefix]); } if ("start" in selector && selector.start !== undefined) { - start = Object.freeze([...selector.start]); + start = ObjectFreeze([...selector.start]); } if ("end" in selector && selector.end !== undefined) { - end = Object.freeze([...selector.end]); + end = ObjectFreeze([...selector.end]); } if (prefix) { if (start && end) { diff --git a/ext/kv/lib.rs b/ext/kv/lib.rs index f17ed55e33..dbc626225f 100644 --- a/ext/kv/lib.rs +++ b/ext/kv/lib.rs @@ -53,8 +53,7 @@ impl UnstableChecker { } 
deno_core::extension!(deno_kv, - // TODO(bartlomieju): specify deps - deps = [ ], + deps = [ deno_console ], parameters = [ DBH: DatabaseHandler ], ops = [ op_kv_database_open, From 8e6c104907971a83f25c10b357dc7bd2b28a297f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 3 May 2023 17:05:53 +0200 Subject: [PATCH 112/320] chore: update release doc template (#18974) --- tools/release/release_doc_template.md | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/tools/release/release_doc_template.md b/tools/release/release_doc_template.md index 0acd4137a1..c0eb9e2140 100644 --- a/tools/release/release_doc_template.md +++ b/tools/release/release_doc_template.md @@ -162,19 +162,6 @@ verify on GitHub that everything looks correct. - [ ] Publish the release on Github -- [ ] Run the - https://github.com/denoland/dotland/actions/workflows/update_versions.yml - workflow. - - [ ] This should open a PR. Review and merge it. - -

    - Failure Steps - - 1. Update https://github.com/denoland/dotland/blob/main/versions.json - manually. - 2. Open a PR and merge. -
    - - [ ] Run the https://github.com/denoland/dotcom/actions/workflows/update_versions.yml workflow. From 246569f6d45852aa42d6f7fe6221fe4d9fa69e3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 3 May 2023 18:35:39 +0200 Subject: [PATCH 113/320] fix(core): rebuild when JS sources for snapshotting change (#18976) --- core/runtime.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/core/runtime.rs b/core/runtime.rs index 5470a89b9b..9676ce571f 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -543,6 +543,29 @@ impl JsRuntime { } } + #[cfg(feature = "include_js_files_for_snapshotting")] + { + let js_sources = options + .extensions + .iter() + .flat_map(|ext| match ext.get_js_sources() { + Some(s) => s.to_owned(), + None => vec![], + }) + .collect::>(); + + if snapshot_options != snapshot_util::SnapshotOptions::None { + for source in &js_sources { + use crate::ExtensionFileSourceCode; + if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = + &source.code + { + println!("cargo:rerun-if-changed={}", path.display()) + } + } + } + } + Rc::new(crate::modules::ExtModuleLoader::new( options.module_loader, esm_sources, From 632395da89c767913cb88edfb437d02772fe84b9 Mon Sep 17 00:00:00 2001 From: kang <1115610574@qq.com> Date: Thu, 4 May 2023 04:41:25 +0800 Subject: [PATCH 114/320] fix(docs): replace "e.g." with "i.e." in `seek()`/`seekSync()` comment (#18964) Clarify calculation of number "9" in `seek()`/`seekSync()` comment of lib.deno.ns.d.ts by replacing "e.g." with "i.e." 
--- cli/tsc/dts/lib.deno.ns.d.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 408580737e..4b8dbb7fdc 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -1832,7 +1832,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(await Deno.seek(file.rid, 2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(await Deno.seek(file.rid, -2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(await Deno.seek(file.rid, -2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * file.close(); * ``` * @@ -1879,7 +1879,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(Deno.seekSync(file.rid, 2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(Deno.seekSync(file.rid, -2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(Deno.seekSync(file.rid, -2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * file.close(); * ``` * @@ -2200,7 +2200,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(await file.seek(2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(await file.seek(-2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(await file.seek(-2, Deno.SeekMode.End)); // "9" (i.e. 11-2) * ``` */ seek(offset: number | bigint, whence: SeekMode): Promise; @@ -2238,7 +2238,7 @@ declare namespace Deno { * // Seek 2 more bytes from the current position * console.log(file.seekSync(2, Deno.SeekMode.Current)); // "8" * // Seek backwards 2 bytes from the end of the file - * console.log(file.seekSync(-2, Deno.SeekMode.End)); // "9" (e.g. 11-2) + * console.log(file.seekSync(-2, Deno.SeekMode.End)); // "9" (i.e. 
11-2) * file.close(); * ``` */ From d905f20cadfd95b927027a3c597d578db606984e Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Wed, 3 May 2023 23:08:42 +0200 Subject: [PATCH 115/320] fix(ext/kv): throw on the Kv constructor (#18978) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18963 --------- Co-authored-by: Bartek Iwańczuk --- cli/tests/unit/kv_test.ts | 6 ++++++ ext/kv/01_db.ts | 11 +++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/kv_test.ts b/cli/tests/unit/kv_test.ts index 3a3476857a..3c5efa5887 100644 --- a/cli/tests/unit/kv_test.ts +++ b/cli/tests/unit/kv_test.ts @@ -1256,6 +1256,12 @@ dbTest("keys must be arrays", async (db) => { ); }); +Deno.test("Deno.Kv constructor throws", () => { + assertThrows(() => { + new Deno.Kv(); + }); +}); + // This function is never called, it is just used to check that all the types // are behaving as expected. async function _typeCheckingTests() { diff --git a/ext/kv/01_db.ts b/ext/kv/01_db.ts index ca37aa8401..f8181cc2e7 100644 --- a/ext/kv/01_db.ts +++ b/ext/kv/01_db.ts @@ -23,7 +23,7 @@ const encodeCursor: ( async function openKv(path: string) { const rid = await core.opAsync("op_kv_database_open", path); - return new Kv(rid); + return new Kv(rid, kvSymbol); } interface RawKvEntry { @@ -43,10 +43,17 @@ type RawValue = { value: bigint; }; +const kvSymbol = Symbol("KvRid"); + class Kv { #rid: number; - constructor(rid: number) { + constructor(rid: number = undefined, symbol: symbol = undefined) { + if (kvSymbol !== symbol) { + throw new TypeError( + "Deno.Kv can not be constructed, use Deno.openKv instead.", + ); + } this.#rid = rid; } From e3276fbb71093faf4e8850f68ed2e080a9bda222 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Wed, 3 May 2023 22:10:51 +0100 Subject: [PATCH 116/320] fix(test): disable preventDefault() for beforeunload event (#18911) Fixes #18910. 
--- cli/tests/integration/bench_tests.rs | 5 +++++ cli/tests/integration/test_tests.rs | 5 +++++ .../testdata/bench/before_unload_prevent_default.out | 7 +++++++ .../testdata/bench/before_unload_prevent_default.ts | 6 ++++++ .../testdata/test/before_unload_prevent_default.out | 5 +++++ cli/tests/testdata/test/before_unload_prevent_default.ts | 6 ++++++ cli/tools/bench.rs | 9 +++------ cli/tools/test.rs | 9 +++------ 8 files changed, 40 insertions(+), 12 deletions(-) create mode 100644 cli/tests/testdata/bench/before_unload_prevent_default.out create mode 100644 cli/tests/testdata/bench/before_unload_prevent_default.ts create mode 100644 cli/tests/testdata/test/before_unload_prevent_default.out create mode 100644 cli/tests/testdata/test/before_unload_prevent_default.ts diff --git a/cli/tests/integration/bench_tests.rs b/cli/tests/integration/bench_tests.rs index 16ac5852ec..5b7361b304 100644 --- a/cli/tests/integration/bench_tests.rs +++ b/cli/tests/integration/bench_tests.rs @@ -114,6 +114,11 @@ itest!(finally_timeout { output: "bench/finally_timeout.out", }); +itest!(before_unload_prevent_default { + args: "bench --quiet bench/before_unload_prevent_default.ts", + output: "bench/before_unload_prevent_default.out", +}); + itest!(group_baseline { args: "bench bench/group_baseline.ts", exit_code: 0, diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index 0dea3b8440..04966f4eee 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -361,6 +361,11 @@ itest!(test_with_custom_jsx { output: "test/hello_world.out", }); +itest!(before_unload_prevent_default { + args: "test --quiet test/before_unload_prevent_default.ts", + output: "test/before_unload_prevent_default.out", +}); + #[test] fn captured_output() { let context = TestContext::default(); diff --git a/cli/tests/testdata/bench/before_unload_prevent_default.out b/cli/tests/testdata/bench/before_unload_prevent_default.out new file mode 100644 
index 0000000000..9166816259 --- /dev/null +++ b/cli/tests/testdata/bench/before_unload_prevent_default.out @@ -0,0 +1,7 @@ +cpu: [WILDCARD] +runtime: deno [WILDCARD] + +[WILDCARD]/before_unload_prevent_default.ts +benchmark time (avg) (min … max) p75 p99 p995 +------------------------------------------------- ----------------------------- +foo [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD] diff --git a/cli/tests/testdata/bench/before_unload_prevent_default.ts b/cli/tests/testdata/bench/before_unload_prevent_default.ts new file mode 100644 index 0000000000..2759d46592 --- /dev/null +++ b/cli/tests/testdata/bench/before_unload_prevent_default.ts @@ -0,0 +1,6 @@ +addEventListener("beforeunload", (e) => { + // The worker should be killed once benchmarks are done regardless of this. + e.preventDefault(); +}); + +Deno.bench("foo", () => {}); diff --git a/cli/tests/testdata/test/before_unload_prevent_default.out b/cli/tests/testdata/test/before_unload_prevent_default.out new file mode 100644 index 0000000000..09da32ff96 --- /dev/null +++ b/cli/tests/testdata/test/before_unload_prevent_default.out @@ -0,0 +1,5 @@ +running 1 test from [WILDCARD]/before_unload_prevent_default.ts +foo ... ok ([WILDCARD]) + +ok | 1 passed | 0 failed ([WILDCARD]) + diff --git a/cli/tests/testdata/test/before_unload_prevent_default.ts b/cli/tests/testdata/test/before_unload_prevent_default.ts new file mode 100644 index 0000000000..421ded5200 --- /dev/null +++ b/cli/tests/testdata/test/before_unload_prevent_default.ts @@ -0,0 +1,6 @@ +addEventListener("beforeunload", (e) => { + // The worker should be killed once tests are done regardless of this. 
+ e.preventDefault(); +}); + +Deno.test("foo", () => {}); diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index aa5bd044df..3d5f99aba4 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -498,12 +498,9 @@ async fn bench_specifier( sender.send(BenchEvent::Result(desc.id, result))?; } - loop { - if !worker.dispatch_beforeunload_event(located_script_name!())? { - break; - } - worker.run_event_loop(false).await?; - } + // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the + // event loop to continue beyond what's needed to await results. + worker.dispatch_beforeunload_event(located_script_name!())?; worker.dispatch_unload_event(located_script_name!())?; Ok(()) } diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 847260352a..50e220a466 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -1033,12 +1033,9 @@ pub async fn test_specifier( sender.send(TestEvent::Result(desc.id, result, elapsed as u64))?; } - loop { - if !worker.dispatch_beforeunload_event(located_script_name!())? { - break; - } - worker.run_event_loop(false).await?; - } + // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the + // event loop to continue beyond what's needed to await results. + worker.dispatch_beforeunload_event(located_script_name!())?; worker.dispatch_unload_event(located_script_name!())?; if let Some(coverage_collector) = coverage_collector.as_mut() { From 7a8bb3b611f02b272b1c19b6f3d8a85b099ca317 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 4 May 2023 01:44:59 +0100 Subject: [PATCH 117/320] fix(core): allow esm extensions not included in snapshot (#18980) Fixes #18979. This changes the predicate for allowing `ext:` specifier resolution from `snapshot_loaded_and_not_snapshotting` to `ext_resolution_allowed` which is only set to true during the extension module loading phase. Module loaders as used in core are now declared as `ExtModuleLoader` rather than `dyn ModuleLoader`. 
--- bench_util/js_runtime.rs | 4 +- .../run/extension_dynamic_import.ts.out | 2 +- core/bindings.rs | 97 +++--- core/extensions.rs | 10 + core/modules.rs | 276 +++++++----------- core/runtime.rs | 134 ++++----- runtime/examples/hello_runtime.js | 1 + runtime/examples/hello_runtime.rs | 57 +--- runtime/examples/hello_runtime_bootstrap.js | 5 + 9 files changed, 221 insertions(+), 365 deletions(-) create mode 100644 runtime/examples/hello_runtime_bootstrap.js diff --git a/bench_util/js_runtime.rs b/bench_util/js_runtime.rs index 57085ef964..a12e6ca62a 100644 --- a/bench_util/js_runtime.rs +++ b/bench_util/js_runtime.rs @@ -10,9 +10,7 @@ use crate::profiling::is_profiling; pub fn create_js_runtime(setup: impl FnOnce() -> Vec) -> JsRuntime { JsRuntime::new(RuntimeOptions { extensions: setup(), - module_loader: Some( - std::rc::Rc::new(deno_core::ExtModuleLoader::default()), - ), + module_loader: None, ..Default::default() }) } diff --git a/cli/tests/testdata/run/extension_dynamic_import.ts.out b/cli/tests/testdata/run/extension_dynamic_import.ts.out index 18b05ea47d..081318960e 100644 --- a/cli/tests/testdata/run/extension_dynamic_import.ts.out +++ b/cli/tests/testdata/run/extension_dynamic_import.ts.out @@ -1,4 +1,4 @@ -error: Uncaught TypeError: Cannot load extension module from external code +error: Uncaught (in promise) TypeError: Cannot load extension module from external code await import("ext:runtime/01_errors.js"); ^ at [WILDCARD]/extension_dynamic_import.ts:1:1 diff --git a/core/bindings.rs b/core/bindings.rs index 2d9c914619..1437bc6575 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -10,7 +10,6 @@ use crate::error::is_instance_of_error; use crate::error::JsStackFrame; use crate::modules::get_asserted_module_type_from_assertions; use crate::modules::parse_import_assertions; -use crate::modules::resolve_helper; use crate::modules::validate_import_assertions; use crate::modules::ImportAssertionsKind; use crate::modules::ModuleMap; @@ -259,46 +258,43 @@ 
pub fn host_import_module_dynamically_callback<'s>( .unwrap() .to_rust_string_lossy(scope); - let is_ext_module = specifier_str.starts_with("ext:"); let resolver = v8::PromiseResolver::new(scope).unwrap(); let promise = resolver.get_promise(scope); - if !is_ext_module { - let assertions = parse_import_assertions( - scope, - import_assertions, - ImportAssertionsKind::DynamicImport, + let assertions = parse_import_assertions( + scope, + import_assertions, + ImportAssertionsKind::DynamicImport, + ); + + { + let tc_scope = &mut v8::TryCatch::new(scope); + validate_import_assertions(tc_scope, &assertions); + if tc_scope.has_caught() { + let e = tc_scope.exception().unwrap(); + resolver.reject(tc_scope, e); + } + } + let asserted_module_type = + get_asserted_module_type_from_assertions(&assertions); + + let resolver_handle = v8::Global::new(scope, resolver); + { + let state_rc = JsRuntime::state(scope); + let module_map_rc = JsRuntime::module_map(scope); + + debug!( + "dyn_import specifier {} referrer {} ", + specifier_str, referrer_name_str ); - - { - let tc_scope = &mut v8::TryCatch::new(scope); - validate_import_assertions(tc_scope, &assertions); - if tc_scope.has_caught() { - let e = tc_scope.exception().unwrap(); - resolver.reject(tc_scope, e); - } - } - let asserted_module_type = - get_asserted_module_type_from_assertions(&assertions); - - let resolver_handle = v8::Global::new(scope, resolver); - { - let state_rc = JsRuntime::state(scope); - let module_map_rc = JsRuntime::module_map(scope); - - debug!( - "dyn_import specifier {} referrer {} ", - specifier_str, referrer_name_str - ); - ModuleMap::load_dynamic_import( - module_map_rc, - &specifier_str, - &referrer_name_str, - asserted_module_type, - resolver_handle, - ); - state_rc.borrow_mut().notify_new_dynamic_import(); - } + ModuleMap::load_dynamic_import( + module_map_rc, + &specifier_str, + &referrer_name_str, + asserted_module_type, + resolver_handle, + ); + state_rc.borrow_mut().notify_new_dynamic_import(); } 
// Map errors from module resolution (not JS errors from module execution) to // ones rethrown from this scope, so they include the call stack of the @@ -311,16 +307,6 @@ pub fn host_import_module_dynamically_callback<'s>( let promise = promise.catch(scope, map_err).unwrap(); - if is_ext_module { - let message = v8::String::new_external_onebyte_static( - scope, - b"Cannot load extension module from external code", - ) - .unwrap(); - let exception = v8::Exception::type_error(scope, message); - resolver.reject(scope, exception); - } - Some(promise) } @@ -375,13 +361,7 @@ fn import_meta_resolve( url_prop.to_rust_string_lossy(scope) }; let module_map_rc = JsRuntime::module_map(scope); - let (loader, snapshot_loaded_and_not_snapshotting) = { - let module_map = module_map_rc.borrow(); - ( - module_map.loader.clone(), - module_map.snapshot_loaded_and_not_snapshotting, - ) - }; + let loader = module_map_rc.borrow().loader.clone(); let specifier_str = specifier.to_rust_string_lossy(scope); if specifier_str.starts_with("npm:") { @@ -389,13 +369,8 @@ fn import_meta_resolve( return; } - match resolve_helper( - snapshot_loaded_and_not_snapshotting, - loader, - &specifier_str, - &referrer, - ResolutionKind::DynamicImport, - ) { + match loader.resolve(&specifier_str, &referrer, ResolutionKind::DynamicImport) + { Ok(resolved) => { let resolved_val = serde_v8::to_v8(scope, resolved.as_str()).unwrap(); rv.set(resolved_val); diff --git a/core/extensions.rs b/core/extensions.rs index a8b52eb3b6..ba151da3d3 100644 --- a/core/extensions.rs +++ b/core/extensions.rs @@ -471,6 +471,16 @@ impl Extension { pub fn disable(self) -> Self { self.enabled(false) } + + pub(crate) fn find_esm( + &self, + specifier: &str, + ) -> Option<&ExtensionFileSource> { + self + .get_esm_sources()? 
+ .iter() + .find(|s| s.specifier == specifier) + } } // Provides a convenient builder pattern to declare Extensions diff --git a/core/modules.rs b/core/modules.rs index c63c4dd30b..bc795de5cf 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -9,6 +9,7 @@ use crate::module_specifier::ModuleSpecifier; use crate::resolve_import; use crate::resolve_url; use crate::snapshot_util::SnapshottedData; +use crate::Extension; use crate::JsRuntime; use crate::OpState; use anyhow::Error; @@ -379,25 +380,6 @@ impl ModuleLoader for NoopModuleLoader { } } -/// Helper function, that calls into `loader.resolve()`, but denies resolution -/// of `ext` scheme if we are running with a snapshot loaded and not -/// creating a snapshot -pub(crate) fn resolve_helper( - snapshot_loaded_and_not_snapshotting: bool, - loader: Rc, - specifier: &str, - referrer: &str, - kind: ResolutionKind, -) -> Result { - if snapshot_loaded_and_not_snapshotting && specifier.starts_with("ext:") { - return Err(generic_error( - "Cannot load extension module from external code", - )); - } - - loader.resolve(specifier, referrer, kind) -} - /// Function that can be passed to the `ExtModuleLoader` that allows to /// transpile sources before passing to V8. 
pub type ExtModuleLoaderCb = @@ -405,7 +387,8 @@ pub type ExtModuleLoaderCb = pub struct ExtModuleLoader { module_loader: Rc, - esm_sources: Vec, + extensions: Rc>>, + ext_resolution_allowed: RefCell, used_esm_sources: RefCell>, maybe_load_callback: Option, } @@ -414,7 +397,8 @@ impl Default for ExtModuleLoader { fn default() -> Self { Self { module_loader: Rc::new(NoopModuleLoader), - esm_sources: vec![], + extensions: Default::default(), + ext_resolution_allowed: Default::default(), used_esm_sources: RefCell::new(HashMap::default()), maybe_load_callback: None, } @@ -424,70 +408,46 @@ impl Default for ExtModuleLoader { impl ExtModuleLoader { pub fn new( module_loader: Option>, - esm_sources: Vec, + extensions: Rc>>, maybe_load_callback: Option, ) -> Self { - let used_esm_sources: HashMap = esm_sources + let used_esm_sources: HashMap = extensions + .borrow() .iter() + .flat_map(|e| e.get_esm_sources()) + .flatten() .map(|file_source| (file_source.specifier.to_string(), false)) .collect(); ExtModuleLoader { module_loader: module_loader.unwrap_or_else(|| Rc::new(NoopModuleLoader)), - esm_sources, + extensions, + ext_resolution_allowed: Default::default(), used_esm_sources: RefCell::new(used_esm_sources), maybe_load_callback, } } -} -impl Drop for ExtModuleLoader { - fn drop(&mut self) { - let used_esm_sources = self.used_esm_sources.get_mut(); - let unused_modules: Vec<_> = used_esm_sources - .iter() - .filter(|(_s, v)| !*v) - .map(|(s, _)| s) - .collect(); - - if !unused_modules.is_empty() { - let mut msg = - "Following modules were passed to ExtModuleLoader but never used:\n" - .to_string(); - for m in unused_modules { - msg.push_str(" - "); - msg.push_str(m); - msg.push('\n'); - } - panic!("{}", msg); - } - } -} - -impl ModuleLoader for ExtModuleLoader { - fn resolve( + pub fn resolve( &self, specifier: &str, referrer: &str, kind: ResolutionKind, ) -> Result { - if let Ok(url_specifier) = ModuleSpecifier::parse(specifier) { - if url_specifier.scheme() == "ext" { - 
let referrer_specifier = ModuleSpecifier::parse(referrer).ok(); - if referrer == "." || referrer_specifier.unwrap().scheme() == "ext" { - return Ok(url_specifier); - } else { - return Err(generic_error( - "Cannot load extension module from external code", - )); - }; + if specifier.starts_with("ext:") { + if !referrer.starts_with("ext:") && referrer != "." + || !*self.ext_resolution_allowed.borrow() + { + return Err(generic_error( + "Cannot load extension module from external code", + )); } + return Ok(ModuleSpecifier::parse(specifier)?); } - self.module_loader.resolve(specifier, referrer, kind) } - fn load( + pub fn load( &self, module_specifier: &ModuleSpecifier, maybe_referrer: Option<&ModuleSpecifier>, @@ -502,10 +462,10 @@ impl ModuleLoader for ExtModuleLoader { } let specifier = module_specifier.to_string(); - let maybe_file_source = self - .esm_sources + let extensions = self.extensions.borrow(); + let maybe_file_source = extensions .iter() - .find(|file_source| file_source.specifier == module_specifier.as_str()); + .find_map(|e| e.find_esm(module_specifier.as_str())); if let Some(file_source) = maybe_file_source { { @@ -538,7 +498,7 @@ impl ModuleLoader for ExtModuleLoader { .boxed_local() } - fn prepare_load( + pub fn prepare_load( &self, op_state: Rc>, module_specifier: &ModuleSpecifier, @@ -556,6 +516,37 @@ impl ModuleLoader for ExtModuleLoader { is_dyn_import, ) } + + pub fn allow_ext_resolution(&self) { + *self.ext_resolution_allowed.borrow_mut() = true; + } + + pub fn disallow_ext_resolution(&self) { + *self.ext_resolution_allowed.borrow_mut() = false; + } +} + +impl Drop for ExtModuleLoader { + fn drop(&mut self) { + let used_esm_sources = self.used_esm_sources.get_mut(); + let unused_modules: Vec<_> = used_esm_sources + .iter() + .filter(|(_s, v)| !*v) + .map(|(s, _)| s) + .collect(); + + if !unused_modules.is_empty() { + let mut msg = + "Following modules were passed to ExtModuleLoader but never used:\n" + .to_string(); + for m in unused_modules { + 
msg.push_str(" - "); + msg.push_str(m); + msg.push('\n'); + } + panic!("{}", msg); + } + } } /// Basic file system module loader. @@ -643,8 +634,7 @@ pub(crate) struct RecursiveModuleLoad { // These three fields are copied from `module_map_rc`, but they are cloned // ahead of time to avoid already-borrowed errors. op_state: Rc>, - loader: Rc, - snapshot_loaded_and_not_snapshotting: bool, + loader: Rc, } impl RecursiveModuleLoad { @@ -700,9 +690,6 @@ impl RecursiveModuleLoad { init, state: LoadState::Init, module_map_rc: module_map_rc.clone(), - snapshot_loaded_and_not_snapshotting: module_map_rc - .borrow() - .snapshot_loaded_and_not_snapshotting, op_state, loader, pending: FuturesUnordered::new(), @@ -731,29 +718,17 @@ impl RecursiveModuleLoad { fn resolve_root(&self) -> Result { match self.init { - LoadInit::Main(ref specifier) => resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::MainModule, - ), - LoadInit::Side(ref specifier) => resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::Import, - ), - LoadInit::DynamicImport(ref specifier, ref referrer, _) => { - resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - referrer, - ResolutionKind::DynamicImport, - ) + LoadInit::Main(ref specifier) => { + self + .loader + .resolve(specifier, ".", ResolutionKind::MainModule) } + LoadInit::Side(ref specifier) => { + self.loader.resolve(specifier, ".", ResolutionKind::Import) + } + LoadInit::DynamicImport(ref specifier, ref referrer, _) => self + .loader + .resolve(specifier, referrer, ResolutionKind::DynamicImport), } } @@ -762,29 +737,21 @@ impl RecursiveModuleLoad { let (module_specifier, maybe_referrer) = match self.init { LoadInit::Main(ref specifier) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - 
ResolutionKind::MainModule, - )?; + let spec = + self + .loader + .resolve(specifier, ".", ResolutionKind::MainModule)?; (spec, None) } LoadInit::Side(ref specifier) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - ".", - ResolutionKind::Import, - )?; + let spec = + self + .loader + .resolve(specifier, ".", ResolutionKind::Import)?; (spec, None) } LoadInit::DynamicImport(ref specifier, ref referrer, _) => { - let spec = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), + let spec = self.loader.resolve( specifier, referrer, ResolutionKind::DynamicImport, @@ -1093,7 +1060,7 @@ pub(crate) struct ModuleMap { pub(crate) next_load_id: ModuleLoadId, // Handling of futures for loading module sources - pub loader: Rc, + pub loader: Rc, op_state: Rc>, pub(crate) dynamic_import_map: HashMap>, @@ -1105,8 +1072,6 @@ pub(crate) struct ModuleMap { // This store is used temporarly, to forward parsed JSON // value from `new_json_module` to `json_module_evaluation_steps` json_value_store: HashMap, v8::Global>, - - pub(crate) snapshot_loaded_and_not_snapshotting: bool, } impl ModuleMap { @@ -1381,9 +1346,8 @@ impl ModuleMap { } pub(crate) fn new( - loader: Rc, + loader: Rc, op_state: Rc>, - snapshot_loaded_and_not_snapshotting: bool, ) -> ModuleMap { Self { handles: vec![], @@ -1397,7 +1361,6 @@ impl ModuleMap { preparing_dynamic_imports: FuturesUnordered::new(), pending_dynamic_imports: FuturesUnordered::new(), json_value_store: HashMap::new(), - snapshot_loaded_and_not_snapshotting, } } @@ -1526,9 +1489,7 @@ impl ModuleMap { return Err(ModuleError::Exception(exception)); } - let module_specifier = match resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), + let module_specifier = match self.loader.resolve( &import_specifier, name.as_ref(), if is_dynamic_import { @@ -1717,20 +1678,9 @@ impl ModuleMap { .dynamic_import_map .insert(load.id, 
resolver_handle); - let (loader, snapshot_loaded_and_not_snapshotting) = { - let module_map = module_map_rc.borrow(); - ( - module_map.loader.clone(), - module_map.snapshot_loaded_and_not_snapshotting, - ) - }; - let resolve_result = resolve_helper( - snapshot_loaded_and_not_snapshotting, - loader, - specifier, - referrer, - ResolutionKind::DynamicImport, - ); + let loader = module_map_rc.borrow().loader.clone(); + let resolve_result = + loader.resolve(specifier, referrer, ResolutionKind::DynamicImport); let fut = match resolve_result { Ok(module_specifier) => { if module_map_rc @@ -1764,14 +1714,10 @@ impl ModuleMap { referrer: &str, import_assertions: HashMap, ) -> Option> { - let resolved_specifier = resolve_helper( - self.snapshot_loaded_and_not_snapshotting, - self.loader.clone(), - specifier, - referrer, - ResolutionKind::Import, - ) - .expect("Module should have been already resolved"); + let resolved_specifier = self + .loader + .resolve(specifier, referrer, ResolutionKind::Import) + .expect("Module should have been already resolved"); let module_type = get_asserted_module_type_from_assertions(&import_assertions); @@ -3042,48 +2988,34 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error(); } #[test] - fn ext_module_loader() { + fn ext_resolution() { let loader = ExtModuleLoader::default(); - assert!(loader - .resolve("ext:foo", "ext:bar", ResolutionKind::Import) - .is_ok()); + loader.allow_ext_resolution(); + loader + .resolve("ext:core.js", "ext:referrer.js", ResolutionKind::Import) + .unwrap(); + loader + .resolve("ext:core.js", ".", ResolutionKind::Import) + .unwrap(); + } + + #[test] + fn ext_resolution_failure() { + let loader = ExtModuleLoader::default(); + loader.allow_ext_resolution(); assert_eq!( loader - .resolve("ext:foo", "file://bar", ResolutionKind::Import) + .resolve("ext:core.js", "file://bar", ResolutionKind::Import,) .err() .map(|e| e.to_string()), Some("Cannot load extension module from external code".to_string()) ); + 
loader.disallow_ext_resolution(); assert_eq!( loader - .resolve("file://foo", "file://bar", ResolutionKind::Import) + .resolve("ext:core.js", "ext:referrer.js", ResolutionKind::Import,) .err() .map(|e| e.to_string()), - Some( - "Module loading is not supported; attempted to resolve: \"file://foo\" from \"file://bar\"" - .to_string() - ) - ); - assert_eq!( - loader - .resolve("file://foo", "ext:bar", ResolutionKind::Import) - .err() - .map(|e| e.to_string()), - Some( - "Module loading is not supported; attempted to resolve: \"file://foo\" from \"ext:bar\"" - .to_string() - ) - ); - assert_eq!( - resolve_helper( - true, - Rc::new(loader), - "ext:core.js", - "file://bar", - ResolutionKind::Import, - ) - .err() - .map(|e| e.to_string()), Some("Cannot load extension module from external code".to_string()) ); } diff --git a/core/runtime.rs b/core/runtime.rs index 9676ce571f..1cbefb6fe9 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -132,7 +132,7 @@ pub struct JsRuntime { v8_isolate: Option, snapshot_options: snapshot_util::SnapshotOptions, allocations: IsolateAllocations, - extensions: Vec, + extensions: Rc>>, event_loop_middlewares: Vec>, // Marks if this is considered the top-level runtime. Used only be inspector. 
is_main: bool, @@ -416,7 +416,7 @@ impl JsRuntime { let global_context; let mut maybe_snapshotted_data = None; - let (mut isolate, snapshot_options) = if snapshot_options.will_snapshot() { + let mut isolate = if snapshot_options.will_snapshot() { let snapshot_creator = snapshot_util::create_snapshot_creator(refs, options.startup_snapshot); let mut isolate = JsRuntime::setup_isolate(snapshot_creator); @@ -433,7 +433,7 @@ impl JsRuntime { global_context = v8::Global::new(scope, context); } - (isolate, snapshot_options) + isolate } else { #[cfg(not(target_env = "msvc"))] let vtable: &'static v8::RustAllocatorVtable< @@ -492,7 +492,7 @@ impl JsRuntime { global_context = v8::Global::new(scope, context); } - (isolate, snapshot_options) + isolate }; // SAFETY: this is first use of `isolate_ptr` so we are sure we're @@ -521,61 +521,33 @@ impl JsRuntime { None }; - let loader = if snapshot_options != snapshot_util::SnapshotOptions::Load { - let esm_sources = options + let loader = options + .module_loader + .unwrap_or_else(|| Rc::new(NoopModuleLoader)); + #[cfg(feature = "include_js_files_for_snapshotting")] + if snapshot_options.will_snapshot() { + for source in options .extensions .iter() - .flat_map(|ext| match ext.get_esm_sources() { - Some(s) => s.to_owned(), - None => vec![], - }) - .collect::>(); - - #[cfg(feature = "include_js_files_for_snapshotting")] - if snapshot_options != snapshot_util::SnapshotOptions::None { - for source in &esm_sources { - use crate::ExtensionFileSourceCode; - if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = - &source.code - { - println!("cargo:rerun-if-changed={}", path.display()) - } - } - } - - #[cfg(feature = "include_js_files_for_snapshotting")] + .flat_map(|e| vec![e.get_esm_sources(), e.get_js_sources()]) + .flatten() + .flatten() { - let js_sources = options - .extensions - .iter() - .flat_map(|ext| match ext.get_js_sources() { - Some(s) => s.to_owned(), - None => vec![], - }) - .collect::>(); - - if 
snapshot_options != snapshot_util::SnapshotOptions::None { - for source in &js_sources { - use crate::ExtensionFileSourceCode; - if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = - &source.code - { - println!("cargo:rerun-if-changed={}", path.display()) - } - } + use crate::ExtensionFileSourceCode; + if let ExtensionFileSourceCode::LoadedFromFsDuringSnapshot(path) = + &source.code + { + println!("cargo:rerun-if-changed={}", path.display()) } } - - Rc::new(crate::modules::ExtModuleLoader::new( - options.module_loader, - esm_sources, - options.snapshot_module_load_cb, - )) - } else { - options - .module_loader - .unwrap_or_else(|| Rc::new(NoopModuleLoader)) - }; + } + let num_extensions = options.extensions.len(); + let extensions = Rc::new(RefCell::new(options.extensions)); + let ext_loader = Rc::new(crate::modules::ExtModuleLoader::new( + Some(loader.clone()), + extensions.clone(), + options.snapshot_module_load_cb, + )); { let mut state = state_rc.borrow_mut(); @@ -589,12 +561,8 @@ impl JsRuntime { Self::STATE_DATA_OFFSET, Rc::into_raw(state_rc.clone()) as *mut c_void, ); - - let module_map_rc = Rc::new(RefCell::new(ModuleMap::new( - loader, - op_state, - snapshot_options == snapshot_util::SnapshotOptions::Load, - ))); + let module_map_rc = + Rc::new(RefCell::new(ModuleMap::new(ext_loader, op_state))); if let Some(snapshotted_data) = maybe_snapshotted_data { let scope = &mut v8::HandleScope::with_context(&mut isolate, global_context); @@ -610,10 +578,10 @@ impl JsRuntime { v8_isolate: Some(isolate), snapshot_options, allocations: IsolateAllocations::default(), - event_loop_middlewares: Vec::with_capacity(options.extensions.len()), - extensions: options.extensions, + event_loop_middlewares: Vec::with_capacity(num_extensions), + extensions, state: state_rc, - module_map: Some(module_map_rc), + module_map: Some(module_map_rc.clone()), is_main: options.is_main, }; @@ -621,7 +589,9 @@ impl JsRuntime { // available during the initialization process. 
js_runtime.init_extension_ops().unwrap(); let realm = js_runtime.global_realm(); + module_map_rc.borrow().loader.allow_ext_resolution(); js_runtime.init_extension_js(&realm).unwrap(); + module_map_rc.borrow().loader.disallow_ext_resolution(); js_runtime } @@ -722,7 +692,21 @@ impl JsRuntime { JsRealm::new(v8::Global::new(scope, context)) }; + self + .module_map + .as_ref() + .unwrap() + .borrow() + .loader + .allow_ext_resolution(); self.init_extension_js(&realm)?; + self + .module_map + .as_ref() + .unwrap() + .borrow() + .loader + .disallow_ext_resolution(); Ok(realm) } @@ -790,7 +774,7 @@ impl JsRuntime { // Take extensions to avoid double-borrow let extensions = std::mem::take(&mut self.extensions); - for ext in &extensions { + for ext in extensions.borrow().iter() { { if let Some(esm_files) = ext.get_esm_sources() { if let Some(entry_point) = ext.get_esm_entry_point() { @@ -863,23 +847,15 @@ impl JsRuntime { /// Initializes ops of provided Extensions fn init_extension_ops(&mut self) -> Result<(), Error> { let op_state = self.op_state(); - // Take extensions to avoid double-borrow - { - let mut extensions: Vec = std::mem::take(&mut self.extensions); + // Setup state + for e in self.extensions.borrow_mut().iter_mut() { + // ops are already registered during in bindings::initialize_context(); + e.init_state(&mut op_state.borrow_mut()); - // Setup state - for e in extensions.iter_mut() { - // ops are already registered during in bindings::initialize_context(); - e.init_state(&mut op_state.borrow_mut()); - - // Setup event-loop middleware - if let Some(middleware) = e.init_event_loop_middleware() { - self.event_loop_middlewares.push(middleware); - } + // Setup event-loop middleware + if let Some(middleware) = e.init_event_loop_middleware() { + self.event_loop_middlewares.push(middleware); } - - // Restore extensions - self.extensions = extensions; } Ok(()) } diff --git a/runtime/examples/hello_runtime.js b/runtime/examples/hello_runtime.js index 
066fa21d66..5b079d8d89 100644 --- a/runtime/examples/hello_runtime.js +++ b/runtime/examples/hello_runtime.js @@ -1,3 +1,4 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. console.log("Hello world!"); console.log(Deno); +Extension.hello(); diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs index 2e930a03f2..157a200f4c 100644 --- a/runtime/examples/hello_runtime.rs +++ b/runtime/examples/hello_runtime.rs @@ -1,72 +1,31 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::FsModuleLoader; -use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; -use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::worker::MainWorker; use deno_runtime::worker::WorkerOptions; -use deno_runtime::BootstrapOptions; use std::path::Path; use std::rc::Rc; -use std::sync::Arc; -fn get_error_class_name(e: &AnyError) -> &'static str { - deno_runtime::errors::get_error_class_name(e).unwrap_or("Error") -} +deno_core::extension!(hello_runtime, esm = ["hello_runtime_bootstrap.js"]); #[tokio::main] async fn main() -> Result<(), AnyError> { - let module_loader = Rc::new(FsModuleLoader); - let create_web_worker_cb = Arc::new(|_| { - todo!("Web workers are not supported in the example"); - }); - let web_worker_event_cb = Arc::new(|_| { - todo!("Web workers are not supported in the example"); - }); - - let options = WorkerOptions { - bootstrap: BootstrapOptions::default(), - extensions: vec![], - startup_snapshot: None, - unsafely_ignore_certificate_errors: None, - root_cert_store_provider: None, - seed: None, - source_map_getter: None, - format_js_error_fn: None, - web_worker_preload_module_cb: web_worker_event_cb.clone(), - web_worker_pre_execute_module_cb: web_worker_event_cb, - create_web_worker_cb, - maybe_inspector_server: None, - should_break_on_first_statement: 
false, - should_wait_for_inspector_session: false, - module_loader, - node_fs: None, - npm_resolver: None, - get_error_class_fn: Some(&get_error_class_name), - cache_storage_dir: None, - origin_storage_dir: None, - blob_store: BlobStore::default(), - broadcast_channel: InMemoryBroadcastChannel::default(), - shared_array_buffer_store: None, - compiled_wasm_module_store: None, - stdio: Default::default(), - }; - let js_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("examples/hello_runtime.js"); let main_module = deno_core::resolve_path( &js_path.to_string_lossy(), - &std::env::current_dir().context("Unable to get CWD")?, + &std::env::current_dir()?, )?; - let permissions = PermissionsContainer::allow_all(); - let mut worker = MainWorker::bootstrap_from_options( main_module.clone(), - permissions, - options, + PermissionsContainer::allow_all(), + WorkerOptions { + module_loader: Rc::new(FsModuleLoader), + extensions: vec![hello_runtime::init_ops_and_esm()], + ..Default::default() + }, ); worker.execute_main_module(&main_module).await?; worker.run_event_loop(false).await?; diff --git a/runtime/examples/hello_runtime_bootstrap.js b/runtime/examples/hello_runtime_bootstrap.js new file mode 100644 index 0000000000..759dde9395 --- /dev/null +++ b/runtime/examples/hello_runtime_bootstrap.js @@ -0,0 +1,5 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+function hello() { + console.log("Hello from extension!"); +} +globalThis.Extension = { hello }; From b8d0e616eaedb81a759c41d5009921bcc6b0b0bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 4 May 2023 02:48:23 +0200 Subject: [PATCH 118/320] fix(npm): canonicalize search directory when looking for package.json (#18981) Co-authored-by: David Sherret --- ext/node/resolution.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 0c90fffb6e..046c774fa2 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -1078,14 +1078,17 @@ impl NodeResolver { url: &ModuleSpecifier, ) -> Result { let file_path = url.to_file_path().unwrap(); - let mut current_dir = file_path.parent().unwrap(); + let current_dir = deno_core::strip_unc_prefix( + self.fs.canonicalize(file_path.parent().unwrap())?, + ); + let mut current_dir = current_dir.as_path(); let package_json_path = current_dir.join("package.json"); if self.fs.exists(&package_json_path) { return Ok(package_json_path); } let root_pkg_folder = self .npm_resolver - .resolve_package_folder_from_path(&url.to_file_path().unwrap())?; + .resolve_package_folder_from_path(current_dir)?; while current_dir.starts_with(&root_pkg_folder) { current_dir = current_dir.parent().unwrap(); let package_json_path = current_dir.join("package.json"); From 95e209a0e4ef0e4a0b886583057b84ef204261d3 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Thu, 4 May 2023 14:36:38 +0200 Subject: [PATCH 119/320] refactor(ext/node): remove NodeEnv trait (#18986) --- cli/build.rs | 2 +- ext/node/lib.rs | 26 +++++++---------- ext/node/ops/require.rs | 65 ++++++++++++++++++++--------------------- runtime/build.rs | 8 +---- runtime/lib.rs | 5 ---- runtime/web_worker.rs | 2 +- runtime/worker.rs | 2 +- 7 files changed, 47 insertions(+), 63 deletions(-) diff --git a/cli/build.rs b/cli/build.rs index 7a3252e20b..21f8c229a1 100644 --- a/cli/build.rs +++ 
b/cli/build.rs @@ -362,7 +362,7 @@ fn create_cli_snapshot(snapshot_path: PathBuf) { deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Default::default()), deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(false, StdFs), - deno_node::deno_node::init_ops::( + deno_node::deno_node::init_ops::( None, Some(Arc::new(deno_node::RealFs)), ), diff --git a/ext/node/lib.rs b/ext/node/lib.rs index b5db83297e..128f3a2fea 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -39,10 +39,6 @@ pub use resolution::NodeResolution; pub use resolution::NodeResolutionMode; pub use resolution::NodeResolver; -pub trait NodeEnv { - type P: NodePermissions; -} - pub trait NodePermissions { fn check_read(&self, path: &Path) -> Result<(), AnyError>; } @@ -192,7 +188,7 @@ fn op_node_build_os() -> String { deno_core::extension!(deno_node, deps = [ deno_io, deno_fs ], - parameters = [Env: NodeEnv], + parameters = [P: NodePermissions], ops = [ ops::crypto::op_node_create_decipheriv, ops::crypto::op_node_cipheriv_encrypt, @@ -271,26 +267,26 @@ deno_core::extension!(deno_node, ops::zlib::op_zlib_reset, op_node_build_os, ops::require::op_require_init_paths, - ops::require::op_require_node_module_paths, + ops::require::op_require_node_module_paths

    , ops::require::op_require_proxy_path, ops::require::op_require_is_deno_dir_package, ops::require::op_require_resolve_deno_dir, ops::require::op_require_is_request_relative, ops::require::op_require_resolve_lookup_paths, - ops::require::op_require_try_self_parent_path, - ops::require::op_require_try_self, - ops::require::op_require_real_path, + ops::require::op_require_try_self_parent_path

    , + ops::require::op_require_try_self

    , + ops::require::op_require_real_path

    , ops::require::op_require_path_is_absolute, ops::require::op_require_path_dirname, - ops::require::op_require_stat, + ops::require::op_require_stat

    , ops::require::op_require_path_resolve, ops::require::op_require_path_basename, - ops::require::op_require_read_file, + ops::require::op_require_read_file

    , ops::require::op_require_as_file_path, - ops::require::op_require_resolve_exports, - ops::require::op_require_read_closest_package_json, - ops::require::op_require_read_package_scope, - ops::require::op_require_package_imports_resolve, + ops::require::op_require_resolve_exports

    , + ops::require::op_require_read_closest_package_json

    , + ops::require::op_require_read_package_scope

    , + ops::require::op_require_package_imports_resolve

    , ops::require::op_require_break_on_next_statement, ], esm_entry_point = "ext:deno_node/02_init.js", diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 1c8647bab7..4a2b97187a 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -16,7 +16,6 @@ use std::rc::Rc; use std::sync::Arc; use crate::resolution; -use crate::NodeEnv; use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; @@ -88,12 +87,12 @@ pub fn op_require_init_paths() -> Vec { } #[op] -pub fn op_require_node_module_paths( +pub fn op_require_node_module_paths

    ( state: &mut OpState, from: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let fs = state.borrow::>(); // Guarantee that "from" is absolute. @@ -105,7 +104,7 @@ where .to_file_path() .unwrap(); - ensure_read_permission::(state, &from)?; + ensure_read_permission::

    (state, &from)?; if cfg!(windows) { // return root node_modules when path is 'D:\\'. @@ -255,15 +254,15 @@ fn op_require_path_is_absolute(p: String) -> bool { } #[op] -fn op_require_stat( +fn op_require_stat

    ( state: &mut OpState, path: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let path = PathBuf::from(path); - ensure_read_permission::(state, &path)?; + ensure_read_permission::

    (state, &path)?; let fs = state.borrow::>(); if let Ok(metadata) = fs.metadata(&path) { if metadata.is_file { @@ -277,15 +276,15 @@ where } #[op] -fn op_require_real_path( +fn op_require_real_path

    ( state: &mut OpState, request: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let path = PathBuf::from(request); - ensure_read_permission::(state, &path)?; + ensure_read_permission::

    (state, &path)?; let fs = state.borrow::>(); let canonicalized_path = deno_core::strip_unc_prefix(fs.canonicalize(&path)?); Ok(canonicalized_path.to_string_lossy().to_string()) @@ -328,14 +327,14 @@ fn op_require_path_basename(request: String) -> Result { } #[op] -fn op_require_try_self_parent_path( +fn op_require_try_self_parent_path

    ( state: &mut OpState, has_parent: bool, maybe_parent_filename: Option, maybe_parent_id: Option, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { if !has_parent { return Ok(None); @@ -349,7 +348,7 @@ where if parent_id == "" || parent_id == "internal/preload" { let fs = state.borrow::>(); if let Ok(cwd) = fs.current_dir() { - ensure_read_permission::(state, &cwd)?; + ensure_read_permission::

    (state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); } } @@ -358,20 +357,20 @@ where } #[op] -fn op_require_try_self( +fn op_require_try_self

    ( state: &mut OpState, parent_path: Option, request: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { if parent_path.is_none() { return Ok(None); } let node_resolver = state.borrow::>(); - let permissions = state.borrow::(); + let permissions = state.borrow::

    (); let pkg = node_resolver .get_package_scope_config( &Url::from_file_path(parent_path.unwrap()).unwrap(), @@ -421,15 +420,15 @@ where } #[op] -fn op_require_read_file( +fn op_require_read_file

    ( state: &mut OpState, file_path: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let file_path = PathBuf::from(file_path); - ensure_read_permission::(state, &file_path)?; + ensure_read_permission::

    (state, &file_path)?; let fs = state.borrow::>(); Ok(fs.read_to_string(&file_path)?) } @@ -446,7 +445,7 @@ pub fn op_require_as_file_path(file_or_url: String) -> String { } #[op] -fn op_require_resolve_exports( +fn op_require_resolve_exports

    ( state: &mut OpState, uses_local_node_modules_dir: bool, modules_path: String, @@ -456,12 +455,12 @@ fn op_require_resolve_exports( parent_path: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let fs = state.borrow::>(); let npm_resolver = state.borrow::>(); let node_resolver = state.borrow::>(); - let permissions = state.borrow::(); + let permissions = state.borrow::

    (); let pkg_path = if npm_resolver .in_npm_package_at_path(&PathBuf::from(&modules_path)) @@ -502,19 +501,19 @@ where } #[op] -fn op_require_read_closest_package_json( +fn op_require_read_closest_package_json

    ( state: &mut OpState, filename: String, ) -> Result where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { - ensure_read_permission::( + ensure_read_permission::

    ( state, PathBuf::from(&filename).parent().unwrap(), )?; let node_resolver = state.borrow::>(); - let permissions = state.borrow::(); + let permissions = state.borrow::

    (); node_resolver.get_closest_package_json( &Url::from_file_path(filename).unwrap(), permissions, @@ -522,15 +521,15 @@ where } #[op] -fn op_require_read_package_scope( +fn op_require_read_package_scope

    ( state: &mut OpState, package_json_path: String, ) -> Option where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let node_resolver = state.borrow::>(); - let permissions = state.borrow::(); + let permissions = state.borrow::

    (); let package_json_path = PathBuf::from(package_json_path); node_resolver .load_package_json(permissions, package_json_path) @@ -538,18 +537,18 @@ where } #[op] -fn op_require_package_imports_resolve( +fn op_require_package_imports_resolve

    ( state: &mut OpState, parent_filename: String, request: String, ) -> Result, AnyError> where - Env: NodeEnv + 'static, + P: NodePermissions + 'static, { let parent_path = PathBuf::from(&parent_filename); - ensure_read_permission::(state, &parent_path)?; + ensure_read_permission::

    (state, &parent_path)?; let node_resolver = state.borrow::>(); - let permissions = state.borrow::(); + let permissions = state.borrow::

    (); let pkg = node_resolver .load_package_json(permissions, parent_path.join("package.json"))?; diff --git a/runtime/build.rs b/runtime/build.rs index 2f3b125959..bba2eae551 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -215,12 +215,6 @@ mod startup_snapshot { } } - struct SnapshotNodeEnv; - - impl deno_node::NodeEnv for SnapshotNodeEnv { - type P = Permissions; - } - deno_core::extension!(runtime, deps = [ deno_webidl, @@ -320,7 +314,7 @@ mod startup_snapshot { runtime::init_ops_and_esm(), // FIXME(bartlomieju): these extensions are specified last, because they // depend on `runtime`, even though it should be other way around - deno_node::deno_node::init_ops_and_esm::(None, None), + deno_node::deno_node::init_ops_and_esm::(None, None), #[cfg(not(feature = "snapshot_from_snapshot"))] runtime_main::init_ops_and_esm(), ]; diff --git a/runtime/lib.rs b/runtime/lib.rs index 878171913f..50822d373e 100644 --- a/runtime/lib.rs +++ b/runtime/lib.rs @@ -35,8 +35,3 @@ pub mod worker; mod worker_bootstrap; pub use worker_bootstrap::BootstrapOptions; - -pub struct RuntimeNodeEnv; -impl deno_node::NodeEnv for RuntimeNodeEnv { - type P = permissions::PermissionsContainer; -} diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index b688aae8b3..1b3dd28096 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -442,7 +442,7 @@ impl WebWorker { deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), - deno_node::deno_node::init_ops::( + deno_node::deno_node::init_ops::( options.npm_resolver, options.node_fs, ), diff --git a/runtime/worker.rs b/runtime/worker.rs index 0d68a4b51e..ac67011f0d 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -267,7 +267,7 @@ impl MainWorker { deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), - 
deno_node::deno_node::init_ops::( + deno_node::deno_node::init_ops::( options.npm_resolver, options.node_fs, ), From 8382adaf7dcc9c813a51552cca196121c02a650e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 4 May 2023 14:37:45 +0200 Subject: [PATCH 120/320] docs: update Deno.Writer docstring (#18987) Closes https://github.com/denoland/deno/issues/18985#issuecomment-1534493623 --- cli/tsc/dts/lib.deno.ns.d.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 4b8dbb7fdc..395f8c667d 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -1488,6 +1488,12 @@ declare namespace Deno { * would resolve to `n` < `p.byteLength`. `write()` must not modify the * slice data, even temporarily. * + * This function is one of the lowest + * level APIs and most users should not work with this directly, but rather use + * [`writeAll()`](https://deno.land/std/streams/write_all.ts?s=writeAll) from + * [`std/streams/write_all.ts`](https://deno.land/std/streams/write_all.ts) + * instead. + * * Implementations should not retain a reference to `p`. */ write(p: Uint8Array): Promise; From 4b645676d62fd595ecac47e24be1b83a3ba636c6 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Thu, 4 May 2023 19:19:35 +0200 Subject: [PATCH 121/320] chore: forward v1.33.2 release commit to main (#18990) **THIS PR HAS GIT CONFLICTS THAT MUST BE RESOLVED** This is the release commit being forwarded back to main for 1.33.2 Please ensure: - [x] Everything looks ok in the PR - [x] The release has been published To make edits to this PR: ```shell git fetch upstream forward_v1.33.2 && git checkout -b forward_v1.33.2 upstream/forward_v1.33.2 ``` Don't need this PR? Close it. 
cc @levex Co-authored-by: levex Co-authored-by: Levente Kurusa --- Cargo.lock | 52 ++++++++++++++++---------------- Cargo.toml | 50 +++++++++++++++--------------- Releases.md | 25 +++++++++++++++ bench_util/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- cli/deno_std.rs | 2 +- cli/napi/sym/Cargo.toml | 2 +- core/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 +- ext/webstorage/Cargo.toml | 2 +- ops/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- serde_v8/Cargo.toml | 2 +- 30 files changed, 103 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8c4f0f6ac5..73b0f3f0de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -696,7 +696,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5" [[package]] name = "deno" -version = "1.33.1" +version = "1.33.2" dependencies = [ "async-trait", "atty", @@ -819,7 +819,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.96.0" +version = "0.97.0" dependencies = [ "bencher", "deno_core", @@ -829,7 +829,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.96.0" +version = "0.97.0" dependencies = [ "async-trait", "deno_core", @@ -839,7 +839,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.34.0" +version = "0.35.0" dependencies = [ "async-trait", "deno_core", @@ -851,14 +851,14 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.102.0" +version = "0.103.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.184.0" +version = "0.185.0" 
dependencies = [ "anyhow", "bytes", @@ -884,7 +884,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.116.0" +version = "0.117.0" dependencies = [ "aes", "aes-gcm", @@ -952,7 +952,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.126.0" +version = "0.127.0" dependencies = [ "bytes", "data-url", @@ -969,7 +969,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.89.0" +version = "0.90.0" dependencies = [ "deno_core", "dlopen", @@ -984,7 +984,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.12.0" +version = "0.13.0" dependencies = [ "async-trait", "deno_core", @@ -1024,7 +1024,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.97.0" +version = "0.98.0" dependencies = [ "async-compression", "base64 0.13.1", @@ -1057,7 +1057,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.12.0" +version = "0.13.0" dependencies = [ "deno_core", "nix", @@ -1068,7 +1068,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.10.0" +version = "0.11.0" dependencies = [ "anyhow", "async-trait", @@ -1122,7 +1122,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.32.0" +version = "0.33.0" dependencies = [ "deno_core", "libloading", @@ -1130,7 +1130,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.94.0" +version = "0.95.0" dependencies = [ "deno_core", "deno_tls", @@ -1145,7 +1145,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.39.0" +version = "0.40.0" dependencies = [ "aes", "cbc", @@ -1214,7 +1214,7 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.62.0" +version = "0.63.0" dependencies = [ "lazy-regex", "once_cell", @@ -1232,7 +1232,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.110.0" +version = "0.111.0" dependencies = [ "atty", "console_static_text", @@ -1313,7 +1313,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.89.0" +version = "0.90.0" dependencies = [ "deno_core", 
"once_cell", @@ -1327,7 +1327,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.102.0" +version = "0.103.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1339,7 +1339,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.133.0" +version = "0.134.0" dependencies = [ "async-trait", "base64-simd", @@ -1357,7 +1357,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.102.0" +version = "0.103.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1365,7 +1365,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.107.0" +version = "0.108.0" dependencies = [ "bytes", "deno_core", @@ -1381,7 +1381,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.97.0" +version = "0.98.0" dependencies = [ "deno_core", "deno_web", @@ -2983,7 +2983,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.32.0" +version = "0.33.0" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", @@ -4252,7 +4252,7 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.95.0" +version = "0.96.0" dependencies = [ "bencher", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 911f86bd2a..f7f6d553b6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,12 +44,12 @@ repository = "https://github.com/denoland/deno" v8 = { version = "0.71.0", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } -deno_core = { version = "0.184.0", path = "./core" } -deno_ops = { version = "0.62.0", path = "./ops" } -serde_v8 = { version = "0.95.0", path = "./serde_v8" } -deno_runtime = { version = "0.110.0", path = "./runtime" } -napi_sym = { version = "0.32.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.96.0", path = "./bench_util" } +deno_core = { version = "0.185.0", path = "./core" } +deno_ops = { version = "0.63.0", path = "./ops" } +serde_v8 = { version = "0.96.0", path = "./serde_v8" } +deno_runtime = { version = "0.111.0", path = "./runtime" } +napi_sym = { version = 
"0.33.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.97.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.13.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } @@ -57,25 +57,25 @@ deno_npm = "0.3.0" deno_semver = "0.2.1" # exts -deno_broadcast_channel = { version = "0.96.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.34.0", path = "./ext/cache" } -deno_console = { version = "0.102.0", path = "./ext/console" } -deno_crypto = { version = "0.116.0", path = "./ext/crypto" } -deno_fetch = { version = "0.126.0", path = "./ext/fetch" } -deno_ffi = { version = "0.89.0", path = "./ext/ffi" } -deno_fs = { version = "0.12.0", path = "./ext/fs" } -deno_http = { version = "0.97.0", path = "./ext/http" } -deno_io = { version = "0.12.0", path = "./ext/io" } -deno_net = { version = "0.94.0", path = "./ext/net" } -deno_node = { version = "0.39.0", path = "./ext/node" } -deno_kv = { version = "0.10.0", path = "./ext/kv" } -deno_tls = { version = "0.89.0", path = "./ext/tls" } -deno_url = { version = "0.102.0", path = "./ext/url" } -deno_web = { version = "0.133.0", path = "./ext/web" } -deno_webidl = { version = "0.102.0", path = "./ext/webidl" } -deno_websocket = { version = "0.107.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.97.0", path = "./ext/webstorage" } -deno_napi = { version = "0.32.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.97.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.35.0", path = "./ext/cache" } +deno_console = { version = "0.103.0", path = "./ext/console" } +deno_crypto = { version = "0.117.0", path = "./ext/crypto" } +deno_fetch = { version = "0.127.0", path = "./ext/fetch" } +deno_ffi = { version = "0.90.0", path = "./ext/ffi" } +deno_fs = { version = "0.13.0", path = "./ext/fs" } +deno_http = { version = "0.98.0", path = "./ext/http" } +deno_io = { version = "0.13.0", path = "./ext/io" } +deno_net = { 
version = "0.95.0", path = "./ext/net" } +deno_node = { version = "0.40.0", path = "./ext/node" } +deno_kv = { version = "0.11.0", path = "./ext/kv" } +deno_tls = { version = "0.90.0", path = "./ext/tls" } +deno_url = { version = "0.103.0", path = "./ext/url" } +deno_web = { version = "0.134.0", path = "./ext/web" } +deno_webidl = { version = "0.103.0", path = "./ext/webidl" } +deno_websocket = { version = "0.108.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.98.0", path = "./ext/webstorage" } +deno_napi = { version = "0.33.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" diff --git a/Releases.md b/Releases.md index ce28e5d6af..666e7b2a15 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,31 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.33.2 / 2023.05.04 + +- fix(core): Use primordials for methods (#18839) +- fix(core): allow esm extensions not included in snapshot (#18980) +- fix(core): rebuild when JS sources for snapshotting change (#18976) +- fix(ext/io) several sync fs fixes (#18886) +- fix(ext/kv): KvU64#valueOf and KvU64 inspect (#18656) +- fix(ext/kv): stricter structured clone serializer (#18914) +- fix(ext/kv): throw on the Kv constructor (#18978) +- fix(ext/node): add missing `release` property to node's `process` (#18923) +- fix(ext/url): throw `TypeError` for empty argument (#18896) +- fix(ext/websocket): update fastwebsockets to 0.3.1 (#18916) +- fix(fmt/json): support formatting number with exponent and no sign (#18894) +- fix(node/http): Request.setTimeout(0) should clear (#18949) +- fix(npm): canonicalize filename before returning (#18948) +- fix(npm): canonicalize search directory when looking for package.json (#18981) +- fix(test): disable preventDefault() for beforeunload event (#18911) +- perf(core): async op pseudo-codegen and performance work (#18887) +- perf(core): use jemalloc for V8 array buffer allocator (#18875) +- 
perf(ext/web): fast path for ws events (#18905) +- perf(ext/websocket): use internal dispatch for msg events (#18904) +- perf: lazily create RootCertStore (#18938) +- perf: lazily retrieve ppid (#18940) +- perf: use jemalloc as global allocator (#18957) + ### 1.33.1 / 2023.04.28 - fix(ext/fetch): subview Uint8Array in Req/Resp (#18890) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index 4595c3d8ad..aac3103064 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.96.0" +version = "0.97.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 63842a6a3f..8067d147d5 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.33.1" +version = "1.33.2" authors.workspace = true default-run = "deno" edition.workspace = true diff --git a/cli/deno_std.rs b/cli/deno_std.rs index 826d73e7a8..8f11e9624d 100644 --- a/cli/deno_std.rs +++ b/cli/deno_std.rs @@ -2,4 +2,4 @@ // WARNING: Ensure this is the only deno_std version reference as this // is automatically updated by the version bump workflow. 
-pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.185.0/"; +pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.186.0/"; diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index 2ecdbf2b45..ed00bd4a8b 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.32.0" +version = "0.33.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/core/Cargo.toml b/core/Cargo.toml index e2ffca6579..77a1ca1a21 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.184.0" +version = "0.185.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index dcff0ade30..199f816881 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.96.0" +version = "0.97.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index ddbec38346..6b3385966c 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.34.0" +version = "0.35.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index c38b537f4b..b2340d10d3 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.102.0" +version = "0.103.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index d6642733ba..71ed46976c 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.116.0" +version = "0.117.0" 
authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index b46159b7ed..a215febac4 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.126.0" +version = "0.127.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 291935e1ca..69e3a5e42c 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.89.0" +version = "0.90.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 5675a483c3..10c71a5438 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.12.0" +version = "0.13.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 73f9f6ef28..9691879ad6 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.97.0" +version = "0.98.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index e11dc833c4..55b2ccab66 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.12.0" +version = "0.13.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 3a060f9c4f..fd36ee536d 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.10.0" +version = "0.11.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index 331876615e..c427be25c0 100644 --- a/ext/napi/Cargo.toml +++ 
b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.32.0" +version = "0.33.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index 5d185e2d2d..87c3cba567 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.94.0" +version = "0.95.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 23d4ba1723..38c8474dce 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.39.0" +version = "0.40.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 13808a22a0..49fb2aae3f 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.89.0" +version = "0.90.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 43c6748e66..f5e8815077 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.102.0" +version = "0.103.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index 6f1185b5b3..ba58f13c54 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.133.0" +version = "0.134.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index 754db9ee18..bda6aeeb1a 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.102.0" +version = "0.103.0" authors.workspace = true edition.workspace = true license.workspace = true 
diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index 0cc549dad9..ce6891f0f0 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.107.0" +version = "0.108.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 10561b20ad..3bffacab54 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.97.0" +version = "0.98.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index 2d091b2ee3..efeefbcd0a 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.62.0" +version = "0.63.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 88813e4872..8618714dca 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.110.0" +version = "0.111.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index ad384347dc..60ffc40e66 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.95.0" +version = "0.96.0" authors.workspace = true edition.workspace = true license.workspace = true From 5270c43e412cc636cd9923182169d166d181f78a Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 4 May 2023 14:28:42 -0400 Subject: [PATCH 122/320] refactor(ext/fs): boxed deno_fs::FileSystem (#18945) 1. Boxed `File` and `FileSystem` to allow more easily passing this through the CLI code (as shown within this pr). 2. `StdFileResource` is now `FileResource`. `FileResource` now contains an `Rc`. 
--- Cargo.lock | 3 + Cargo.toml | 1 + cli/build.rs | 9 +- cli/factory.rs | 4 + cli/standalone/mod.rs | 2 + cli/worker.rs | 36 +- core/resources.rs | 7 + ext/fs/Cargo.toml | 4 +- ext/fs/interface.rs | 164 ++------- ext/fs/lib.rs | 135 ++++---- ext/fs/ops.rs | 454 +++++++++---------------- ext/fs/std_fs.rs | 411 +++++------------------ ext/io/Cargo.toml | 3 + ext/io/fs.rs | 330 ++++++++++++++++++ ext/io/lib.rs | 737 ++++++++++++++++++++++------------------- runtime/build.rs | 6 +- runtime/ops/process.rs | 6 +- runtime/ops/tty.rs | 125 +++---- runtime/web_worker.rs | 5 +- runtime/worker.rs | 6 +- 20 files changed, 1190 insertions(+), 1258 deletions(-) create mode 100644 ext/io/fs.rs diff --git a/Cargo.lock b/Cargo.lock index 73b0f3f0de..ad816c9a2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1059,7 +1059,10 @@ dependencies = [ name = "deno_io" version = "0.13.0" dependencies = [ + "async-trait", "deno_core", + "filetime", + "fs3", "nix", "once_cell", "tokio", diff --git a/Cargo.toml b/Cargo.toml index f7f6d553b6..4ffac7e793 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -92,6 +92,7 @@ dlopen = "0.1.8" encoding_rs = "=0.8.31" ecb = "=0.1.1" fastwebsockets = "=0.3.1" +filetime = "0.2.16" flate2 = "=1.0.24" fs3 = "0.5.0" futures = "0.3.21" diff --git a/cli/build.rs b/cli/build.rs index 21f8c229a1..6cedb53cef 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -9,7 +9,6 @@ use deno_core::Extension; use deno_core::ExtensionFileSource; use deno_core::ExtensionFileSourceCode; use deno_runtime::deno_cache::SqliteBackedCache; -use deno_runtime::deno_fs::StdFs; use deno_runtime::deno_kv::sqlite::SqliteDbHandler; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::*; @@ -361,11 +360,11 @@ fn create_cli_snapshot(snapshot_path: PathBuf) { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Default::default()), - deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(false, StdFs), - deno_node::deno_node::init_ops::( - None, - 
Some(Arc::new(deno_node::RealFs)), + deno_fs::deno_fs::init_ops::( + false, + Arc::new(deno_fs::RealFs), ), + deno_node::deno_node::init_ops::(None, None), cli::init_ops_and_esm(), // NOTE: This needs to be init_ops_and_esm! ]; diff --git a/cli/factory.rs b/cli/factory.rs index 73d0cb8ea9..295794a51b 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -42,6 +42,7 @@ use crate::worker::HasNodeSpecifierChecker; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_runtime::deno_fs; use deno_runtime::deno_node; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; @@ -553,6 +554,7 @@ impl CliFactory { let node_code_translator = self.node_code_translator().await?.clone(); let options = self.cli_options().clone(); let main_worker_options = self.create_cli_main_worker_options()?; + let fs = Arc::new(deno_fs::RealFs); let node_fs = self.node_fs().clone(); let root_cert_store_provider = self.root_cert_store_provider().clone(); let node_resolver = self.node_resolver().await?.clone(); @@ -579,6 +581,7 @@ impl CliFactory { ), )), root_cert_store_provider.clone(), + fs.clone(), node_fs.clone(), maybe_inspector_server.clone(), main_worker_options.clone(), @@ -610,6 +613,7 @@ impl CliFactory { ), )), self.root_cert_store_provider().clone(), + Arc::new(deno_fs::RealFs), self.node_fs().clone(), self.maybe_inspector_server().clone(), self.create_cli_main_worker_options()?, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 2ef21d417e..0f65db679e 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -30,6 +30,7 @@ use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::ResolutionKind; use deno_graph::source::Resolver; +use deno_runtime::deno_fs; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -253,6 +254,7 @@ pub async fn run( BlobStore::default(), Box::new(module_loader_factory), 
root_cert_store_provider, + Arc::new(deno_fs::RealFs), node_fs, None, CliMainWorkerOptions { diff --git a/cli/worker.rs b/cli/worker.rs index ae8822fe40..5216af2638 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -18,6 +18,7 @@ use deno_core::SharedArrayBufferStore; use deno_core::SourceMapGetter; use deno_runtime::colors; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; +use deno_runtime::deno_fs; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolver; @@ -97,6 +98,7 @@ struct SharedWorkerState { compiled_wasm_module_store: CompiledWasmModuleStore, module_loader_factory: Box, root_cert_store_provider: Arc, + fs: Arc, node_fs: Arc, maybe_inspector_server: Option>, } @@ -308,6 +310,7 @@ impl CliMainWorkerFactory { blob_store: BlobStore, module_loader_factory: Box, root_cert_store_provider: Arc, + fs: Arc, node_fs: Arc, maybe_inspector_server: Option>, options: CliMainWorkerOptions, @@ -325,6 +328,7 @@ impl CliMainWorkerFactory { compiled_wasm_module_store: Default::default(), module_loader_factory, root_cert_store_provider, + fs, node_fs, maybe_inspector_server, }), @@ -445,6 +449,7 @@ impl CliMainWorkerFactory { should_break_on_first_statement: shared.options.inspect_brk, should_wait_for_inspector_session: shared.options.inspect_wait, module_loader, + fs: shared.fs.clone(), node_fs: Some(shared.node_fs.clone()), npm_resolver: Some(shared.npm_resolver.clone()), get_error_class_fn: Some(&errors::get_error_class_name), @@ -570,6 +575,7 @@ fn create_web_worker_callback( format_js_error_fn: Some(Arc::new(format_js_error)), source_map_getter: maybe_source_map_getter, module_loader, + fs: shared.fs.clone(), node_fs: Some(shared.node_fs.clone()), npm_resolver: Some(shared.npm_resolver.clone()), worker_type: args.worker_type, @@ -597,13 +603,8 @@ fn create_web_worker_callback( #[cfg(test)] mod tests { - use std::rc::Rc; - use super::*; use deno_core::resolve_path; - use 
deno_core::FsModuleLoader; - use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; - use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; fn create_test_worker() -> MainWorker { @@ -612,31 +613,8 @@ mod tests { let permissions = PermissionsContainer::new(Permissions::default()); let options = WorkerOptions { - bootstrap: BootstrapOptions::default(), - extensions: vec![], startup_snapshot: Some(crate::js::deno_isolate_init()), - unsafely_ignore_certificate_errors: None, - root_cert_store_provider: None, - seed: None, - format_js_error_fn: None, - source_map_getter: None, - web_worker_preload_module_cb: Arc::new(|_| unreachable!()), - web_worker_pre_execute_module_cb: Arc::new(|_| unreachable!()), - create_web_worker_cb: Arc::new(|_| unreachable!()), - maybe_inspector_server: None, - should_break_on_first_statement: false, - should_wait_for_inspector_session: false, - module_loader: Rc::new(FsModuleLoader), - node_fs: Some(Arc::new(deno_node::RealFs)), - npm_resolver: None, - get_error_class_fn: None, - cache_storage_dir: None, - origin_storage_dir: None, - blob_store: BlobStore::default(), - broadcast_channel: InMemoryBroadcastChannel::default(), - shared_array_buffer_store: None, - compiled_wasm_module_store: None, - stdio: Default::default(), + ..Default::default() }; MainWorker::bootstrap_from_options(main_module, permissions, options) diff --git a/core/resources.rs b/core/resources.rs index 84e6847fc6..94d2a2306a 100644 --- a/core/resources.rs +++ b/core/resources.rs @@ -187,6 +187,13 @@ pub trait Resource: Any + 'static { None } + /// Resources backed by a file descriptor can let ops know to allow for + /// low-level optimizations. 
+ #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + None + } + fn size_hint(&self) -> (u64, Option) { (0, None) } diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 10c71a5438..f6d563b64d 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -17,8 +17,8 @@ path = "lib.rs" async-trait.workspace = true deno_core.workspace = true deno_io.workspace = true -filetime = "0.2.16" -fs3 = "0.5.0" +filetime.workspace = true +fs3.workspace = true libc.workspace = true log.workspace = true rand.workspace = true diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 184cb8096f..1847b59828 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -1,6 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use std::io; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; @@ -8,6 +7,10 @@ use std::rc::Rc; use serde::Deserialize; use serde::Serialize; +use deno_io::fs::File; +use deno_io::fs::FsResult; +use deno_io::fs::FsStat; + #[derive(Deserialize, Default, Debug, Clone, Copy)] #[serde(rename_all = "camelCase")] #[serde(default)] @@ -52,27 +55,6 @@ impl OpenOptions { } } -pub struct FsStat { - pub is_file: bool, - pub is_directory: bool, - pub is_symlink: bool, - pub size: u64, - - pub mtime: Option, - pub atime: Option, - pub birthtime: Option, - - pub dev: u64, - pub ino: u64, - pub mode: u32, - pub nlink: u64, - pub uid: u32, - pub gid: u32, - pub rdev: u64, - pub blksize: u64, - pub blocks: u64, -} - #[derive(Deserialize)] pub enum FsFileType { #[serde(rename = "file")] @@ -90,93 +72,25 @@ pub struct FsDirEntry { pub is_symlink: bool, } -pub enum FsError { - Io(io::Error), - FileBusy, - NotSupported, -} - -impl From for FsError { - fn from(err: io::Error) -> Self { - Self::Io(err) - } -} - -pub type FsResult = Result; - #[async_trait::async_trait(?Send)] -pub trait File { - fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()>; - async fn write_all_async(self: Rc, buf: Vec) -> FsResult<()>; - - fn 
read_all_sync(self: Rc) -> FsResult>; - async fn read_all_async(self: Rc) -> FsResult>; - - fn chmod_sync(self: Rc, pathmode: u32) -> FsResult<()>; - async fn chmod_async(self: Rc, mode: u32) -> FsResult<()>; - - fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult; - async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult; - - fn datasync_sync(self: Rc) -> FsResult<()>; - async fn datasync_async(self: Rc) -> FsResult<()>; - - fn sync_sync(self: Rc) -> FsResult<()>; - async fn sync_async(self: Rc) -> FsResult<()>; - - fn stat_sync(self: Rc) -> FsResult; - async fn stat_async(self: Rc) -> FsResult; - - fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()>; - async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()>; - fn unlock_sync(self: Rc) -> FsResult<()>; - async fn unlock_async(self: Rc) -> FsResult<()>; - - fn truncate_sync(self: Rc, len: u64) -> FsResult<()>; - async fn truncate_async(self: Rc, len: u64) -> FsResult<()>; - - fn utime_sync( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()>; - async fn utime_async( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()>; -} - -#[async_trait::async_trait(?Send)] -pub trait FileSystem: Clone { - type File: File; - +pub trait FileSystem: Send + Sync { fn cwd(&self) -> FsResult; fn tmp_dir(&self) -> FsResult; - fn chdir(&self, path: impl AsRef) -> FsResult<()>; + fn chdir(&self, path: &Path) -> FsResult<()>; fn umask(&self, mask: Option) -> FsResult; fn open_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, - ) -> FsResult; + ) -> FsResult>; async fn open_async( &self, path: PathBuf, options: OpenOptions, - ) -> FsResult; + ) -> FsResult>; - fn mkdir_sync( - &self, - path: impl AsRef, - recusive: bool, - mode: u32, - ) -> FsResult<()>; + fn mkdir_sync(&self, path: &Path, recusive: bool, mode: u32) -> FsResult<()>; async fn mkdir_async( &self, path: PathBuf, @@ -184,12 
+98,12 @@ pub trait FileSystem: Clone { mode: u32, ) -> FsResult<()>; - fn chmod_sync(&self, path: impl AsRef, mode: u32) -> FsResult<()>; + fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()>; async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()>; fn chown_sync( &self, - path: impl AsRef, + path: &Path, uid: Option, gid: Option, ) -> FsResult<()>; @@ -200,52 +114,36 @@ pub trait FileSystem: Clone { gid: Option, ) -> FsResult<()>; - fn remove_sync( - &self, - path: impl AsRef, - recursive: bool, - ) -> FsResult<()>; + fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()>; async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()>; - fn copy_file_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()>; async fn copy_file_async( &self, oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()>; - fn stat_sync(&self, path: impl AsRef) -> FsResult; + fn stat_sync(&self, path: &Path) -> FsResult; async fn stat_async(&self, path: PathBuf) -> FsResult; - fn lstat_sync(&self, path: impl AsRef) -> FsResult; + fn lstat_sync(&self, path: &Path) -> FsResult; async fn lstat_async(&self, path: PathBuf) -> FsResult; - fn realpath_sync(&self, path: impl AsRef) -> FsResult; + fn realpath_sync(&self, path: &Path) -> FsResult; async fn realpath_async(&self, path: PathBuf) -> FsResult; - fn read_dir_sync(&self, path: impl AsRef) -> FsResult>; + fn read_dir_sync(&self, path: &Path) -> FsResult>; async fn read_dir_async(&self, path: PathBuf) -> FsResult>; - fn rename_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()>; async fn rename_async( &self, oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()>; - fn link_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()>; + fn link_sync(&self, oldpath: &Path, 
newpath: &Path) -> FsResult<()>; async fn link_async( &self, oldpath: PathBuf, @@ -254,8 +152,8 @@ pub trait FileSystem: Clone { fn symlink_sync( &self, - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()>; async fn symlink_async( @@ -265,15 +163,15 @@ pub trait FileSystem: Clone { file_type: Option, ) -> FsResult<()>; - fn read_link_sync(&self, path: impl AsRef) -> FsResult; + fn read_link_sync(&self, path: &Path) -> FsResult; async fn read_link_async(&self, path: PathBuf) -> FsResult; - fn truncate_sync(&self, path: impl AsRef, len: u64) -> FsResult<()>; + fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()>; async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()>; fn utime_sync( &self, - path: impl AsRef, + path: &Path, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, @@ -290,12 +188,11 @@ pub trait FileSystem: Clone { fn write_file_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, data: &[u8], ) -> FsResult<()> { let file = self.open_sync(path, options)?; - let file = Rc::new(file); if let Some(mode) = options.mode { file.clone().chmod_sync(mode)?; } @@ -309,25 +206,22 @@ pub trait FileSystem: Clone { data: Vec, ) -> FsResult<()> { let file = self.open_async(path, options).await?; - let file = Rc::new(file); if let Some(mode) = options.mode { file.clone().chmod_async(mode).await?; } - file.write_all_async(data).await?; + file.write_all(data.into()).await?; Ok(()) } - fn read_file_sync(&self, path: impl AsRef) -> FsResult> { + fn read_file_sync(&self, path: &Path) -> FsResult> { let options = OpenOptions::read(); let file = self.open_sync(path, options)?; - let file = Rc::new(file); let buf = file.read_all_sync()?; Ok(buf) } async fn read_file_async(&self, path: PathBuf) -> FsResult> { let options = OpenOptions::read(); - let file = self.clone().open_async(path, options).await?; - let file = Rc::new(file); + let file = self.open_async(path, 
options).await?; let buf = file.read_all_async().await?; Ok(buf) } diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index 464d84adeb..4fdf6b3f11 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -4,25 +4,21 @@ mod interface; mod ops; mod std_fs; -pub use crate::interface::File; pub use crate::interface::FileSystem; pub use crate::interface::FsDirEntry; -pub use crate::interface::FsError; pub use crate::interface::FsFileType; -pub use crate::interface::FsResult; -pub use crate::interface::FsStat; pub use crate::interface::OpenOptions; use crate::ops::*; -pub use crate::std_fs::StdFs; +pub use crate::std_fs::RealFs; use deno_core::error::AnyError; use deno_core::OpState; -use deno_core::Resource; use std::cell::RefCell; use std::convert::From; use std::path::Path; use std::rc::Rc; +use std::sync::Arc; pub trait FsPermissions { fn check_read(&mut self, p: &Path, api_name: &str) -> Result<(), AnyError>; @@ -87,78 +83,77 @@ pub(crate) fn check_unstable2(state: &Rc>, api_name: &str) { deno_core::extension!(deno_fs, deps = [ deno_web ], - parameters = [Fs: FileSystem, P: FsPermissions], - bounds = [Fs::File: Resource], + parameters = [P: FsPermissions], ops = [ - op_cwd, - op_umask, - op_chdir, + op_cwd

    , + op_umask, + op_chdir

    , - op_open_sync, - op_open_async, - op_mkdir_sync, - op_mkdir_async, - op_chmod_sync, - op_chmod_async, - op_chown_sync, - op_chown_async, - op_remove_sync, - op_remove_async, - op_copy_file_sync, - op_copy_file_async, - op_stat_sync, - op_stat_async, - op_lstat_sync, - op_lstat_async, - op_realpath_sync, - op_realpath_async, - op_read_dir_sync, - op_read_dir_async, - op_rename_sync, - op_rename_async, - op_link_sync, - op_link_async, - op_symlink_sync, - op_symlink_async, - op_read_link_sync, - op_read_link_async, - op_truncate_sync, - op_truncate_async, - op_utime_sync, - op_utime_async, - op_make_temp_dir_sync, - op_make_temp_dir_async, - op_make_temp_file_sync, - op_make_temp_file_async, - op_write_file_sync, - op_write_file_async, - op_read_file_sync, - op_read_file_async, - op_read_file_text_sync, - op_read_file_text_async, + op_open_sync

    , + op_open_async

    , + op_mkdir_sync

    , + op_mkdir_async

    , + op_chmod_sync

    , + op_chmod_async

    , + op_chown_sync

    , + op_chown_async

    , + op_remove_sync

    , + op_remove_async

    , + op_copy_file_sync

    , + op_copy_file_async

    , + op_stat_sync

    , + op_stat_async

    , + op_lstat_sync

    , + op_lstat_async

    , + op_realpath_sync

    , + op_realpath_async

    , + op_read_dir_sync

    , + op_read_dir_async

    , + op_rename_sync

    , + op_rename_async

    , + op_link_sync

    , + op_link_async

    , + op_symlink_sync

    , + op_symlink_async

    , + op_read_link_sync

    , + op_read_link_async

    , + op_truncate_sync

    , + op_truncate_async

    , + op_utime_sync

    , + op_utime_async

    , + op_make_temp_dir_sync

    , + op_make_temp_dir_async

    , + op_make_temp_file_sync

    , + op_make_temp_file_async

    , + op_write_file_sync

    , + op_write_file_async

    , + op_read_file_sync

    , + op_read_file_async

    , + op_read_file_text_sync

    , + op_read_file_text_async

    , - op_seek_sync, - op_seek_async, - op_fdatasync_sync, - op_fdatasync_async, - op_fsync_sync, - op_fsync_async, - op_fstat_sync, - op_fstat_async, - op_flock_sync, - op_flock_async, - op_funlock_sync, - op_funlock_async, - op_ftruncate_sync, - op_ftruncate_async, - op_futime_sync, - op_futime_async, + op_seek_sync, + op_seek_async, + op_fdatasync_sync, + op_fdatasync_async, + op_fsync_sync, + op_fsync_async, + op_fstat_sync, + op_fstat_async, + op_flock_sync, + op_flock_async, + op_funlock_sync, + op_funlock_async, + op_ftruncate_sync, + op_ftruncate_async, + op_futime_sync, + op_futime_async, ], esm = [ "30_fs.js" ], options = { unstable: bool, - fs: Fs, + fs: Arc, }, state = |state, options| { state.put(UnstableChecker { unstable: options.unstable }); diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 8c5d212015..c9996d8ce7 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -7,65 +7,39 @@ use std::io::SeekFrom; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; use deno_core::error::custom_error; -use deno_core::error::not_supported; -use deno_core::error::resource_unavailable; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::CancelFuture; use deno_core::CancelHandle; use deno_core::OpState; -use deno_core::Resource; use deno_core::ResourceId; use deno_core::ZeroCopyBuf; +use deno_io::fs::FileResource; +use deno_io::fs::FsError; +use deno_io::fs::FsStat; use rand::rngs::ThreadRng; use rand::thread_rng; use rand::Rng; use serde::Serialize; -use tokio::task::JoinError; use crate::check_unstable; use crate::check_unstable2; use crate::interface::FsDirEntry; -use crate::interface::FsError; use crate::interface::FsFileType; -use crate::interface::FsStat; -use crate::File; use crate::FileSystem; use crate::FsPermissions; use crate::OpenOptions; -impl From for FsError { - fn from(err: JoinError) -> Self { - if err.is_cancelled() { - todo!("async tasks must not be cancelled") - } - if 
err.is_panic() { - std::panic::resume_unwind(err.into_panic()); // resume the panic on the main thread - } - unreachable!() - } -} - -impl From for AnyError { - fn from(err: FsError) -> Self { - match err { - FsError::Io(err) => AnyError::from(err), - FsError::FileBusy => resource_unavailable(), - FsError::NotSupported => not_supported(), - } - } -} - #[op] -pub fn op_cwd(state: &mut OpState) -> Result +pub fn op_cwd

    (state: &mut OpState) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let fs = state.borrow::(); + let fs = state.borrow::>(); let path = fs.cwd()?; state .borrow_mut::

    () @@ -75,34 +49,36 @@ where } #[op] -fn op_chdir(state: &mut OpState, directory: &str) -> Result<(), AnyError> +fn op_chdir

    (state: &mut OpState, directory: &str) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let d = PathBuf::from(&directory); state.borrow_mut::

    ().check_read(&d, "Deno.chdir()")?; - state.borrow::().chdir(&d).context_path("chdir", &d) + state + .borrow::>() + .chdir(&d) + .context_path("chdir", &d) } #[op] -fn op_umask(state: &mut OpState, mask: Option) -> Result +fn op_umask(state: &mut OpState, mask: Option) -> Result where - Fs: FileSystem + 'static, { check_unstable(state, "Deno.umask"); - state.borrow::().umask(mask).context("umask") + state + .borrow::>() + .umask(mask) + .context("umask") } #[op] -fn op_open_sync( +fn op_open_sync

    ( state: &mut OpState, path: String, options: Option, ) -> Result where - Fs: FileSystem + 'static, - Fs::File: Resource, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -111,22 +87,22 @@ where let permissions = state.borrow_mut::

    (); permissions.check(&options, &path, "Deno.openSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); let file = fs.open_sync(&path, options).context_path("open", &path)?; - let rid = state.resource_table.add(file); + let rid = state + .resource_table + .add(FileResource::new(file, "fsFile".to_string())); Ok(rid) } #[op] -async fn op_open_async( +async fn op_open_async

    ( state: Rc>, path: String, options: Option, ) -> Result where - Fs: FileSystem + 'static, - Fs::File: Resource, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -136,26 +112,28 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check(&options, &path, "Deno.open()")?; - state.borrow::().clone() + state.borrow::>().clone() }; let file = fs .open_async(path.clone(), options) .await .context_path("open", &path)?; - let rid = state.borrow_mut().resource_table.add(file); + let rid = state + .borrow_mut() + .resource_table + .add(FileResource::new(file, "fsFile".to_string())); Ok(rid) } #[op] -fn op_mkdir_sync( +fn op_mkdir_sync

    ( state: &mut OpState, path: String, recursive: bool, mode: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -166,7 +144,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.mkdirSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.mkdir_sync(&path, recursive, mode) .context_path("mkdir", &path)?; @@ -174,14 +152,13 @@ where } #[op] -async fn op_mkdir_async( +async fn op_mkdir_async

    ( state: Rc>, path: String, recursive: bool, mode: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -191,7 +168,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.mkdir()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.mkdir_async(path.clone(), recursive, mode) @@ -202,39 +179,37 @@ where } #[op] -fn op_chmod_sync( +fn op_chmod_sync

    ( state: &mut OpState, path: String, mode: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

    () .check_write(&path, "Deno.chmodSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.chmod_sync(&path, mode).context_path("chmod", &path)?; Ok(()) } #[op] -async fn op_chmod_async( +async fn op_chmod_async

    ( state: Rc>, path: String, mode: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.chmod()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.chmod_async(path.clone(), mode) .await @@ -243,42 +218,40 @@ where } #[op] -fn op_chown_sync( +fn op_chown_sync

    ( state: &mut OpState, path: String, uid: Option, gid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

    () .check_write(&path, "Deno.chownSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.chown_sync(&path, uid, gid) .context_path("chown", &path)?; Ok(()) } #[op] -async fn op_chown_async( +async fn op_chown_async

    ( state: Rc>, path: String, uid: Option, gid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.chown()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.chown_async(path.clone(), uid, gid) .await @@ -287,13 +260,12 @@ where } #[op] -fn op_remove_sync( +fn op_remove_sync

    ( state: &mut OpState, path: &str, recursive: bool, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -302,7 +274,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.removeSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.remove_sync(&path, recursive) .context_path("remove", &path)?; @@ -310,13 +282,12 @@ where } #[op] -async fn op_remove_async( +async fn op_remove_async

    ( state: Rc>, path: String, recursive: bool, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -326,7 +297,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.remove()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.remove_async(path.clone(), recursive) @@ -337,13 +308,12 @@ where } #[op] -fn op_copy_file_sync( +fn op_copy_file_sync

    ( state: &mut OpState, from: &str, to: &str, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let from = PathBuf::from(from); @@ -353,7 +323,7 @@ where permissions.check_read(&from, "Deno.copyFileSync()")?; permissions.check_write(&to, "Deno.copyFileSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.copy_file_sync(&from, &to) .context_two_path("copy", &from, &to)?; @@ -361,13 +331,12 @@ where } #[op] -async fn op_copy_file_async( +async fn op_copy_file_async

    ( state: Rc>, from: String, to: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let from = PathBuf::from(from); @@ -378,7 +347,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&from, "Deno.copyFile()")?; permissions.check_write(&to, "Deno.copyFile()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.copy_file_async(from.clone(), to.clone()) @@ -389,20 +358,19 @@ where } #[op] -fn op_stat_sync( +fn op_stat_sync

    ( state: &mut OpState, path: String, stat_out_buf: &mut [u32], ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

    () .check_read(&path, "Deno.statSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); let stat = fs.stat_sync(&path).context_path("stat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -410,12 +378,11 @@ where } #[op] -async fn op_stat_async( +async fn op_stat_async

    ( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -423,7 +390,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.stat()")?; - state.borrow::().clone() + state.borrow::>().clone() }; let stat = fs .stat_async(path.clone()) @@ -433,20 +400,19 @@ where } #[op] -fn op_lstat_sync( +fn op_lstat_sync

    ( state: &mut OpState, path: String, stat_out_buf: &mut [u32], ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state .borrow_mut::

    () .check_read(&path, "Deno.lstatSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); let stat = fs.lstat_sync(&path).context_path("lstat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -454,12 +420,11 @@ where } #[op] -async fn op_lstat_async( +async fn op_lstat_async

    ( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -467,7 +432,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.lstat()")?; - state.borrow::().clone() + state.borrow::>().clone() }; let stat = fs .lstat_async(path.clone()) @@ -477,17 +442,16 @@ where } #[op] -fn op_realpath_sync( +fn op_realpath_sync

    ( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); - let fs = state.borrow::().clone(); + let fs = state.borrow::>().clone(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.realPathSync()")?; if path.is_relative() { @@ -502,12 +466,11 @@ where } #[op] -async fn op_realpath_async( +async fn op_realpath_async

    ( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -515,7 +478,7 @@ where let fs; { let mut state = state.borrow_mut(); - fs = state.borrow::().clone(); + fs = state.borrow::>().clone(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.realPath()")?; if path.is_relative() { @@ -532,12 +495,11 @@ where } #[op] -fn op_read_dir_sync( +fn op_read_dir_sync

    ( state: &mut OpState, path: String, ) -> Result, AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -546,19 +508,18 @@ where .borrow_mut::

    () .check_read(&path, "Deno.readDirSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); let entries = fs.read_dir_sync(&path).context_path("readdir", &path)?; Ok(entries) } #[op] -async fn op_read_dir_async( +async fn op_read_dir_async

    ( state: Rc>, path: String, ) -> Result, AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -568,7 +529,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.readDir()")?; - state.borrow::().clone() + state.borrow::>().clone() }; let entries = fs @@ -580,13 +541,12 @@ where } #[op] -fn op_rename_sync( +fn op_rename_sync

    ( state: &mut OpState, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -597,7 +557,7 @@ where permissions.check_write(&oldpath, "Deno.renameSync()")?; permissions.check_write(&newpath, "Deno.renameSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.rename_sync(&oldpath, &newpath) .context_two_path("rename", &oldpath, &newpath)?; @@ -605,13 +565,12 @@ where } #[op] -async fn op_rename_async( +async fn op_rename_async

    ( state: Rc>, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -623,7 +582,7 @@ where permissions.check_read(&oldpath, "Deno.rename()")?; permissions.check_write(&oldpath, "Deno.rename()")?; permissions.check_write(&newpath, "Deno.rename()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.rename_async(oldpath.clone(), newpath.clone()) @@ -634,13 +593,12 @@ where } #[op] -fn op_link_sync( +fn op_link_sync

    ( state: &mut OpState, oldpath: &str, newpath: &str, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -652,7 +610,7 @@ where permissions.check_read(&newpath, "Deno.linkSync()")?; permissions.check_write(&newpath, "Deno.linkSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.link_sync(&oldpath, &newpath) .context_two_path("link", &oldpath, &newpath)?; @@ -660,13 +618,12 @@ where } #[op] -async fn op_link_async( +async fn op_link_async

    ( state: Rc>, oldpath: String, newpath: String, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(&oldpath); @@ -679,7 +636,7 @@ where permissions.check_write(&oldpath, "Deno.link()")?; permissions.check_read(&newpath, "Deno.link()")?; permissions.check_write(&newpath, "Deno.link()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.link_async(oldpath.clone(), newpath.clone()) @@ -690,14 +647,13 @@ where } #[op] -fn op_symlink_sync( +fn op_symlink_sync

    ( state: &mut OpState, oldpath: &str, newpath: &str, file_type: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(oldpath); @@ -707,7 +663,7 @@ where permissions.check_write_all("Deno.symlinkSync()")?; permissions.check_read_all("Deno.symlinkSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.symlink_sync(&oldpath, &newpath, file_type) .context_two_path("symlink", &oldpath, &newpath)?; @@ -715,14 +671,13 @@ where } #[op] -async fn op_symlink_async( +async fn op_symlink_async

    ( state: Rc>, oldpath: String, newpath: String, file_type: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let oldpath = PathBuf::from(&oldpath); @@ -733,7 +688,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_write_all("Deno.symlink()")?; permissions.check_read_all("Deno.symlink()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.symlink_async(oldpath.clone(), newpath.clone(), file_type) @@ -744,12 +699,11 @@ where } #[op] -fn op_read_link_sync( +fn op_read_link_sync

    ( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -758,7 +712,7 @@ where .borrow_mut::

    () .check_read(&path, "Deno.readLink()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); let target = fs.read_link_sync(&path).context_path("readlink", &path)?; let target_string = path_into_string(target.into_os_string())?; @@ -766,12 +720,11 @@ where } #[op] -async fn op_read_link_async( +async fn op_read_link_async

    ( state: Rc>, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -781,7 +734,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.readLink()")?; - state.borrow::().clone() + state.borrow::>().clone() }; let target = fs @@ -793,13 +746,12 @@ where } #[op] -fn op_truncate_sync( +fn op_truncate_sync

    ( state: &mut OpState, path: &str, len: u64, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -808,7 +760,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.truncateSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.truncate_sync(&path, len) .context_path("truncate", &path)?; @@ -816,13 +768,12 @@ where } #[op] -async fn op_truncate_async( +async fn op_truncate_async

    ( state: Rc>, path: String, len: u64, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -832,7 +783,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.truncate()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.truncate_async(path.clone(), len) @@ -843,7 +794,7 @@ where } #[op] -fn op_utime_sync( +fn op_utime_sync

    ( state: &mut OpState, path: &str, atime_secs: i64, @@ -852,14 +803,13 @@ fn op_utime_sync( mtime_nanos: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); state.borrow_mut::

    ().check_write(&path, "Deno.utime()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) .context_path("utime", &path)?; @@ -867,7 +817,7 @@ where } #[op] -async fn op_utime_async( +async fn op_utime_async

    ( state: Rc>, path: String, atime_secs: i64, @@ -876,7 +826,6 @@ async fn op_utime_async( mtime_nanos: u32, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -884,7 +833,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.utime()")?; - state.borrow::().clone() + state.borrow::>().clone() }; fs.utime_async( @@ -901,17 +850,16 @@ where } #[op] -fn op_make_temp_dir_sync( +fn op_make_temp_dir_sync

    ( state: &mut OpState, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_sync::(state, dir)?; + let (dir, fs) = make_temp_check_sync::

    (state, dir)?; let mut rng = thread_rng(); @@ -935,17 +883,16 @@ where } #[op] -async fn op_make_temp_dir_async( +async fn op_make_temp_dir_async

    ( state: Rc>, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_async::(state, dir)?; + let (dir, fs) = make_temp_check_async::

    (state, dir)?; let mut rng = thread_rng(); @@ -969,17 +916,16 @@ where } #[op] -fn op_make_temp_file_sync( +fn op_make_temp_file_sync

    ( state: &mut OpState, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_sync::(state, dir)?; + let (dir, fs) = make_temp_check_sync::

    (state, dir)?; let open_opts = OpenOptions { write: true, @@ -1010,17 +956,16 @@ where } #[op] -async fn op_make_temp_file_async( +async fn op_make_temp_file_async

    ( state: Rc>, dir: Option, prefix: Option, suffix: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let (dir, fs) = make_temp_check_async::(state, dir)?; + let (dir, fs) = make_temp_check_async::

    (state, dir)?; let open_opts = OpenOptions { write: true, @@ -1049,15 +994,14 @@ where .context("tmpfile") } -fn make_temp_check_sync( +fn make_temp_check_sync

    ( state: &mut OpState, dir: Option, -) -> Result<(PathBuf, Fs), AnyError> +) -> Result<(PathBuf, Arc), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { - let fs = state.borrow::().clone(); + let fs = state.borrow::>().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1079,16 +1023,15 @@ where Ok((dir, fs)) } -fn make_temp_check_async( +fn make_temp_check_async

    ( state: Rc>, dir: Option, -) -> Result<(PathBuf, Fs), AnyError> +) -> Result<(PathBuf, Arc), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let mut state = state.borrow_mut(); - let fs = state.borrow::().clone(); + let fs = state.borrow::>().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1128,7 +1071,7 @@ fn tmp_name( } #[op] -fn op_write_file_sync( +fn op_write_file_sync

    ( state: &mut OpState, path: String, mode: Option, @@ -1138,7 +1081,6 @@ fn op_write_file_sync( data: ZeroCopyBuf, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1147,7 +1089,7 @@ where let options = OpenOptions::write(create, append, create_new, mode); permissions.check(&options, &path, "Deno.writeFileSync()")?; - let fs = state.borrow::(); + let fs = state.borrow::>(); fs.write_file_sync(&path, options, &data) .context_path("writefile", &path)?; @@ -1156,7 +1098,7 @@ where } #[op] -async fn op_write_file_async( +async fn op_write_file_async

    ( state: Rc>, path: String, mode: Option, @@ -1167,7 +1109,6 @@ async fn op_write_file_async( cancel_rid: Option, ) -> Result<(), AnyError> where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1180,7 +1121,7 @@ where permissions.check(&options, &path, "Deno.writeFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::>().clone(), cancel_handle) }; let fut = fs.write_file_async(path.clone(), options, data.to_vec()); @@ -1201,12 +1142,11 @@ where } #[op] -fn op_read_file_sync( +fn op_read_file_sync

    ( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1214,20 +1154,19 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::(); - let buf = fs.read_file_sync(path).context("readfile")?; + let fs = state.borrow::>(); + let buf = fs.read_file_sync(&path).context("readfile")?; Ok(buf.into()) } #[op] -async fn op_read_file_async( +async fn op_read_file_async

    ( state: Rc>, path: String, cancel_rid: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1238,7 +1177,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::>().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); @@ -1259,12 +1198,11 @@ where } #[op] -fn op_read_file_text_sync( +fn op_read_file_text_sync

    ( state: &mut OpState, path: String, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1272,20 +1210,19 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::(); - let buf = fs.read_file_sync(path).context("readfile")?; + let fs = state.borrow::>(); + let buf = fs.read_file_sync(&path).context("readfile")?; Ok(string_from_utf8_lossy(buf)) } #[op] -async fn op_read_file_text_async( +async fn op_read_file_text_async

    ( state: Rc>, path: String, cancel_rid: Option, ) -> Result where - Fs: FileSystem + 'static, P: FsPermissions + 'static, { let path = PathBuf::from(path); @@ -1296,7 +1233,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::().clone(), cancel_handle) + (state.borrow::>().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); @@ -1340,106 +1277,75 @@ fn to_seek_from(offset: i64, whence: i32) -> Result { } #[op] -fn op_seek_sync( +fn op_seek_sync( state: &mut OpState, rid: ResourceId, offset: i64, whence: i32, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result { let pos = to_seek_from(offset, whence)?; - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; let cursor = file.seek_sync(pos)?; Ok(cursor) } #[op] -async fn op_seek_async( +async fn op_seek_async( state: Rc>, rid: ResourceId, offset: i64, whence: i32, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result { let pos = to_seek_from(offset, whence)?; - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; let cursor = file.seek_async(pos).await?; Ok(cursor) } #[op] -fn op_fdatasync_sync( +fn op_fdatasync_sync( state: &mut OpState, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.datasync_sync()?; Ok(()) } #[op] -async fn op_fdatasync_async( +async fn op_fdatasync_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; 
file.datasync_async().await?; Ok(()) } #[op] -fn op_fsync_sync( - state: &mut OpState, - rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +fn op_fsync_sync(state: &mut OpState, rid: ResourceId) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.sync_sync()?; Ok(()) } #[op] -async fn op_fsync_async( +async fn op_fsync_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file.sync_async().await?; Ok(()) } #[op] -fn op_fstat_sync( +fn op_fstat_sync( state: &mut OpState, rid: ResourceId, stat_out_buf: &mut [u32], -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; let stat = file.stat_sync()?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -1447,143 +1353,107 @@ where } #[op] -async fn op_fstat_async( +async fn op_fstat_async( state: Rc>, rid: ResourceId, -) -> Result -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result { + let file = FileResource::get_file(&state.borrow(), rid)?; let stat = file.stat_async().await?; Ok(stat.into()) } #[op] -fn op_flock_sync( +fn op_flock_sync( state: &mut OpState, rid: ResourceId, exclusive: bool, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable(state, "Deno.flockSync"); - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; file.lock_sync(exclusive)?; Ok(()) } #[op] -async fn op_flock_async( 
+async fn op_flock_async( state: Rc>, rid: ResourceId, exclusive: bool, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable2(&state, "Deno.flock"); - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; file.lock_async(exclusive).await?; Ok(()) } #[op] -fn op_funlock_sync( +fn op_funlock_sync( state: &mut OpState, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable(state, "Deno.funlockSync"); - let file = state.resource_table.get::(rid)?; + let file = FileResource::get_file(state, rid)?; file.unlock_sync()?; Ok(()) } #[op] -async fn op_funlock_async( +async fn op_funlock_async( state: Rc>, rid: ResourceId, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ +) -> Result<(), AnyError> { check_unstable2(&state, "Deno.funlock"); - let file = state.borrow().resource_table.get::(rid)?; + let file = FileResource::get_file(&state.borrow(), rid)?; file.unlock_async().await?; Ok(()) } #[op] -fn op_ftruncate_sync( +fn op_ftruncate_sync( state: &mut OpState, rid: ResourceId, len: u64, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.truncate_sync(len)?; Ok(()) } #[op] -async fn op_ftruncate_async( +async fn op_ftruncate_async( state: Rc>, rid: ResourceId, len: u64, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file.truncate_async(len).await?; Ok(()) } #[op] -fn op_futime_sync( +fn op_futime_sync( state: &mut OpState, rid: ResourceId, atime_secs: i64, 
atime_nanos: u32, mtime_secs: i64, mtime_nanos: u32, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(state, rid)?; file.utime_sync(atime_secs, atime_nanos, mtime_secs, mtime_nanos)?; Ok(()) } #[op] -async fn op_futime_async( +async fn op_futime_async( state: Rc>, rid: ResourceId, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, mtime_nanos: u32, -) -> Result<(), AnyError> -where - Fs: FileSystem + 'static, - Fs::File: Resource, -{ - let file = state.borrow().resource_table.get::(rid)?; +) -> Result<(), AnyError> { + let file = FileResource::get_file(&state.borrow(), rid)?; file .utime_async(atime_secs, atime_nanos, mtime_secs, mtime_nanos) .await?; diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 4bdbf49432..a657939db2 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -4,34 +4,29 @@ use std::fs; use std::io; -use std::io::Read; -use std::io::Seek; use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use std::time::SystemTime; -use std::time::UNIX_EPOCH; -use deno_io::StdFileResource; -use fs3::FileExt; +use deno_io::fs::File; +use deno_io::fs::FsResult; +use deno_io::fs::FsStat; +use deno_io::StdFileResourceInner; use crate::interface::FsDirEntry; -use crate::interface::FsError; use crate::interface::FsFileType; -use crate::interface::FsResult; -use crate::interface::FsStat; -use crate::File; use crate::FileSystem; use crate::OpenOptions; +#[cfg(not(unix))] +use deno_io::fs::FsError; + #[derive(Clone)] -pub struct StdFs; +pub struct RealFs; #[async_trait::async_trait(?Send)] -impl FileSystem for StdFs { - type File = StdFileResource; - +impl FileSystem for RealFs { fn cwd(&self) -> FsResult { std::env::current_dir().map_err(Into::into) } @@ -40,7 +35,7 @@ impl FileSystem for StdFs { Ok(std::env::temp_dir()) } - fn chdir(&self, path: impl AsRef) -> FsResult<()> { + fn 
chdir(&self, path: &Path) -> FsResult<()> { std::env::set_current_dir(path).map_err(Into::into) } @@ -78,27 +73,27 @@ impl FileSystem for StdFs { fn open_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, - ) -> FsResult { + ) -> FsResult> { let opts = open_options(options); let std_file = opts.open(path)?; - Ok(StdFileResource::fs_file(std_file)) + Ok(Rc::new(StdFileResourceInner::file(std_file))) } async fn open_async( &self, path: PathBuf, options: OpenOptions, - ) -> FsResult { + ) -> FsResult> { let opts = open_options(options); let std_file = tokio::task::spawn_blocking(move || opts.open(path)).await??; - Ok(StdFileResource::fs_file(std_file)) + Ok(Rc::new(StdFileResourceInner::file(std_file))) } fn mkdir_sync( &self, - path: impl AsRef, + path: &Path, recursive: bool, mode: u32, ) -> FsResult<()> { @@ -110,19 +105,19 @@ impl FileSystem for StdFs { recursive: bool, mode: u32, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || mkdir(path, recursive, mode)).await? + tokio::task::spawn_blocking(move || mkdir(&path, recursive, mode)).await? } - fn chmod_sync(&self, path: impl AsRef, mode: u32) -> FsResult<()> { + fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> { chmod(path, mode) } async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { - tokio::task::spawn_blocking(move || chmod(path, mode)).await? + tokio::task::spawn_blocking(move || chmod(&path, mode)).await? } fn chown_sync( &self, - path: impl AsRef, + path: &Path, uid: Option, gid: Option, ) -> FsResult<()> { @@ -134,68 +129,56 @@ impl FileSystem for StdFs { uid: Option, gid: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || chown(path, uid, gid)).await? + tokio::task::spawn_blocking(move || chown(&path, uid, gid)).await? 
} - fn remove_sync( - &self, - path: impl AsRef, - recursive: bool, - ) -> FsResult<()> { + fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> { remove(path, recursive) } async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { - tokio::task::spawn_blocking(move || remove(path, recursive)).await? + tokio::task::spawn_blocking(move || remove(&path, recursive)).await? } - fn copy_file_sync( - &self, - from: impl AsRef, - to: impl AsRef, - ) -> FsResult<()> { + fn copy_file_sync(&self, from: &Path, to: &Path) -> FsResult<()> { copy_file(from, to) } async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - tokio::task::spawn_blocking(move || copy_file(from, to)).await? + tokio::task::spawn_blocking(move || copy_file(&from, &to)).await? } - fn stat_sync(&self, path: impl AsRef) -> FsResult { + fn stat_sync(&self, path: &Path) -> FsResult { stat(path).map(Into::into) } async fn stat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || stat(path)) + tokio::task::spawn_blocking(move || stat(&path)) .await? .map(Into::into) } - fn lstat_sync(&self, path: impl AsRef) -> FsResult { + fn lstat_sync(&self, path: &Path) -> FsResult { lstat(path).map(Into::into) } async fn lstat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || lstat(path)) + tokio::task::spawn_blocking(move || lstat(&path)) .await? .map(Into::into) } - fn realpath_sync(&self, path: impl AsRef) -> FsResult { + fn realpath_sync(&self, path: &Path) -> FsResult { realpath(path) } async fn realpath_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || realpath(path)).await? + tokio::task::spawn_blocking(move || realpath(&path)).await? 
} - fn read_dir_sync(&self, path: impl AsRef) -> FsResult> { + fn read_dir_sync(&self, path: &Path) -> FsResult> { read_dir(path) } async fn read_dir_async(&self, path: PathBuf) -> FsResult> { - tokio::task::spawn_blocking(move || read_dir(path)).await? + tokio::task::spawn_blocking(move || read_dir(&path)).await? } - fn rename_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()> { + fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { fs::rename(oldpath, newpath).map_err(Into::into) } async fn rename_async( @@ -208,11 +191,7 @@ impl FileSystem for StdFs { .map_err(Into::into) } - fn link_sync( - &self, - oldpath: impl AsRef, - newpath: impl AsRef, - ) -> FsResult<()> { + fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { fs::hard_link(oldpath, newpath).map_err(Into::into) } async fn link_async( @@ -227,8 +206,8 @@ impl FileSystem for StdFs { fn symlink_sync( &self, - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()> { symlink(oldpath, newpath, file_type) @@ -239,11 +218,11 @@ impl FileSystem for StdFs { newpath: PathBuf, file_type: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || symlink(oldpath, newpath, file_type)) + tokio::task::spawn_blocking(move || symlink(&oldpath, &newpath, file_type)) .await? } - fn read_link_sync(&self, path: impl AsRef) -> FsResult { + fn read_link_sync(&self, path: &Path) -> FsResult { fs::read_link(path).map_err(Into::into) } async fn read_link_async(&self, path: PathBuf) -> FsResult { @@ -252,16 +231,16 @@ impl FileSystem for StdFs { .map_err(Into::into) } - fn truncate_sync(&self, path: impl AsRef, len: u64) -> FsResult<()> { + fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> { truncate(path, len) } async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { - tokio::task::spawn_blocking(move || truncate(path, len)).await? 
+ tokio::task::spawn_blocking(move || truncate(&path, len)).await? } fn utime_sync( &self, - path: impl AsRef, + path: &Path, atime_secs: i64, atime_nanos: u32, mtime_secs: i64, @@ -289,7 +268,7 @@ impl FileSystem for StdFs { fn write_file_sync( &self, - path: impl AsRef, + path: &Path, options: OpenOptions, data: &[u8], ) -> FsResult<()> { @@ -324,7 +303,7 @@ impl FileSystem for StdFs { .await? } - fn read_file_sync(&self, path: impl AsRef) -> FsResult> { + fn read_file_sync(&self, path: &Path) -> FsResult> { fs::read(path).map_err(Into::into) } async fn read_file_async(&self, path: PathBuf) -> FsResult> { @@ -334,7 +313,7 @@ impl FileSystem for StdFs { } } -fn mkdir(path: impl AsRef, recursive: bool, mode: u32) -> FsResult<()> { +fn mkdir(path: &Path, recursive: bool, mode: u32) -> FsResult<()> { let mut builder = fs::DirBuilder::new(); builder.recursive(recursive); #[cfg(unix)] @@ -350,7 +329,7 @@ fn mkdir(path: impl AsRef, recursive: bool, mode: u32) -> FsResult<()> { } #[cfg(unix)] -fn chmod(path: impl AsRef, mode: u32) -> FsResult<()> { +fn chmod(path: &Path, mode: u32) -> FsResult<()> { use std::os::unix::fs::PermissionsExt; let permissions = fs::Permissions::from_mode(mode); fs::set_permissions(path, permissions)?; @@ -359,24 +338,20 @@ fn chmod(path: impl AsRef, mode: u32) -> FsResult<()> { // TODO: implement chmod for Windows (#4357) #[cfg(not(unix))] -fn chmod(path: impl AsRef, _mode: u32) -> FsResult<()> { +fn chmod(path: &Path, _mode: u32) -> FsResult<()> { // Still check file/dir exists on Windows std::fs::metadata(path)?; Err(FsError::NotSupported) } #[cfg(unix)] -fn chown( - path: impl AsRef, - uid: Option, - gid: Option, -) -> FsResult<()> { +fn chown(path: &Path, uid: Option, gid: Option) -> FsResult<()> { use nix::unistd::chown; use nix::unistd::Gid; use nix::unistd::Uid; let owner = uid.map(Uid::from_raw); let group = gid.map(Gid::from_raw); - let res = chown(path.as_ref(), owner, group); + let res = chown(path, owner, group); if let Err(err) = 
res { return Err(io::Error::from_raw_os_error(err as i32).into()); } @@ -385,60 +360,57 @@ fn chown( // TODO: implement chown for Windows #[cfg(not(unix))] -fn chown( - _path: impl AsRef, - _uid: Option, - _gid: Option, -) -> FsResult<()> { +fn chown(_path: &Path, _uid: Option, _gid: Option) -> FsResult<()> { Err(FsError::NotSupported) } -fn remove(path: impl AsRef, recursive: bool) -> FsResult<()> { +fn remove(path: &Path, recursive: bool) -> FsResult<()> { // TODO: this is racy. This should open fds, and then `unlink` those. - let metadata = fs::symlink_metadata(&path)?; + let metadata = fs::symlink_metadata(path)?; let file_type = metadata.file_type(); let res = if file_type.is_dir() { if recursive { - fs::remove_dir_all(&path) + fs::remove_dir_all(path) } else { - fs::remove_dir(&path) + fs::remove_dir(path) } } else if file_type.is_symlink() { #[cfg(unix)] { - fs::remove_file(&path) + fs::remove_file(path) } #[cfg(not(unix))] { use std::os::windows::prelude::MetadataExt; use winapi::um::winnt::FILE_ATTRIBUTE_DIRECTORY; if metadata.file_attributes() & FILE_ATTRIBUTE_DIRECTORY != 0 { - fs::remove_dir(&path) + fs::remove_dir(path) } else { - fs::remove_file(&path) + fs::remove_file(path) } } } else { - fs::remove_file(&path) + fs::remove_file(path) }; res.map_err(Into::into) } -fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { +fn copy_file(from: &Path, to: &Path) -> FsResult<()> { #[cfg(target_os = "macos")] { use libc::clonefile; use libc::stat; use libc::unlink; use std::ffi::CString; + use std::io::Read; use std::os::unix::fs::OpenOptionsExt; use std::os::unix::fs::PermissionsExt; use std::os::unix::prelude::OsStrExt; - let from_str = CString::new(from.as_ref().as_os_str().as_bytes()).unwrap(); - let to_str = CString::new(to.as_ref().as_os_str().as_bytes()).unwrap(); + let from_str = CString::new(from.as_os_str().as_bytes()).unwrap(); + let to_str = CString::new(to.as_os_str().as_bytes()).unwrap(); // SAFETY: `from` and `to` are valid C 
strings. // std::fs::copy does open() + fcopyfile() on macOS. We try to use @@ -499,36 +471,37 @@ fn copy_file(from: impl AsRef, to: impl AsRef) -> FsResult<()> { } #[cfg(not(windows))] -fn stat(path: impl AsRef) -> FsResult { +fn stat(path: &Path) -> FsResult { let metadata = fs::metadata(path)?; - Ok(metadata_to_fsstat(metadata)) + Ok(FsStat::from_std(metadata)) } #[cfg(windows)] -fn stat(path: impl AsRef) -> FsResult { - let metadata = fs::metadata(path.as_ref())?; - let mut fsstat = metadata_to_fsstat(metadata); +fn stat(path: &Path) -> FsResult { + let metadata = fs::metadata(path)?; + let mut fsstat = FsStat::from_std(metadata); use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; - let path = path.as_ref().canonicalize()?; + let path = path.canonicalize()?; stat_extra(&mut fsstat, &path, FILE_FLAG_BACKUP_SEMANTICS)?; Ok(fsstat) } #[cfg(not(windows))] -fn lstat(path: impl AsRef) -> FsResult { +fn lstat(path: &Path) -> FsResult { let metadata = fs::symlink_metadata(path)?; - Ok(metadata_to_fsstat(metadata)) + Ok(FsStat::from_std(metadata)) } #[cfg(windows)] -fn lstat(path: impl AsRef) -> FsResult { - let metadata = fs::symlink_metadata(path.as_ref())?; - let mut fsstat = metadata_to_fsstat(metadata); +fn lstat(path: &Path) -> FsResult { use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS; use winapi::um::winbase::FILE_FLAG_OPEN_REPARSE_POINT; + + let metadata = fs::symlink_metadata(path)?; + let mut fsstat = FsStat::from_std(metadata); stat_extra( &mut fsstat, - path.as_ref(), + path, FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, )?; Ok(fsstat) @@ -595,62 +568,11 @@ fn stat_extra( } } -#[inline(always)] -fn metadata_to_fsstat(metadata: fs::Metadata) -> FsStat { - macro_rules! 
unix_or_zero { - ($member:ident) => {{ - #[cfg(unix)] - { - use std::os::unix::fs::MetadataExt; - metadata.$member() - } - #[cfg(not(unix))] - { - 0 - } - }}; - } - - #[inline(always)] - fn to_msec(maybe_time: Result) -> Option { - match maybe_time { - Ok(time) => Some( - time - .duration_since(UNIX_EPOCH) - .map(|t| t.as_millis() as u64) - .unwrap_or_else(|err| err.duration().as_millis() as u64), - ), - Err(_) => None, - } - } - - FsStat { - is_file: metadata.is_file(), - is_directory: metadata.is_dir(), - is_symlink: metadata.file_type().is_symlink(), - size: metadata.len(), - - mtime: to_msec(metadata.modified()), - atime: to_msec(metadata.accessed()), - birthtime: to_msec(metadata.created()), - - dev: unix_or_zero!(dev), - ino: unix_or_zero!(ino), - mode: unix_or_zero!(mode), - nlink: unix_or_zero!(nlink), - uid: unix_or_zero!(uid), - gid: unix_or_zero!(gid), - rdev: unix_or_zero!(rdev), - blksize: unix_or_zero!(blksize), - blocks: unix_or_zero!(blocks), - } +fn realpath(path: &Path) -> FsResult { + Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) } -fn realpath(path: impl AsRef) -> FsResult { - Ok(deno_core::strip_unc_prefix(path.as_ref().canonicalize()?)) -} - -fn read_dir(path: impl AsRef) -> FsResult> { +fn read_dir(path: &Path) -> FsResult> { let entries = fs::read_dir(path)? 
.filter_map(|entry| { let entry = entry.ok()?; @@ -679,24 +601,24 @@ fn read_dir(path: impl AsRef) -> FsResult> { #[cfg(not(windows))] fn symlink( - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, _file_type: Option, ) -> FsResult<()> { - std::os::unix::fs::symlink(oldpath.as_ref(), newpath.as_ref())?; + std::os::unix::fs::symlink(oldpath, newpath)?; Ok(()) } #[cfg(windows)] fn symlink( - oldpath: impl AsRef, - newpath: impl AsRef, + oldpath: &Path, + newpath: &Path, file_type: Option, ) -> FsResult<()> { let file_type = match file_type { Some(file_type) => file_type, None => { - let old_meta = fs::metadata(&oldpath); + let old_meta = fs::metadata(oldpath); match old_meta { Ok(metadata) => { if metadata.is_file() { @@ -723,17 +645,17 @@ fn symlink( match file_type { FsFileType::File => { - std::os::windows::fs::symlink_file(&oldpath, &newpath)?; + std::os::windows::fs::symlink_file(oldpath, newpath)?; } FsFileType::Directory => { - std::os::windows::fs::symlink_dir(&oldpath, &newpath)?; + std::os::windows::fs::symlink_dir(oldpath, newpath)?; } }; Ok(()) } -fn truncate(path: impl AsRef, len: u64) -> FsResult<()> { +fn truncate(path: &Path, len: u64) -> FsResult<()> { let file = fs::OpenOptions::new().write(true).open(path)?; file.set_len(len)?; Ok(()) @@ -760,162 +682,3 @@ fn open_options(options: OpenOptions) -> fs::OpenOptions { open_options.create_new(options.create_new); open_options } - -fn sync( - resource: Rc, - f: impl FnOnce(&mut fs::File) -> io::Result, -) -> FsResult { - let res = resource - .with_file2(|file| f(file)) - .ok_or(FsError::FileBusy)??; - Ok(res) -} - -async fn nonblocking( - resource: Rc, - f: impl FnOnce(&mut fs::File) -> io::Result + Send + 'static, -) -> FsResult { - let res = resource.with_file_blocking_task2(f).await?; - Ok(res) -} - -#[async_trait::async_trait(?Send)] -impl File for StdFileResource { - fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()> { - sync(self, |file| file.write_all(buf)) - } 
- async fn write_all_async(self: Rc, buf: Vec) -> FsResult<()> { - nonblocking(self, move |file| file.write_all(&buf)).await - } - - fn read_all_sync(self: Rc) -> FsResult> { - sync(self, |file| { - let mut buf = Vec::new(); - file.read_to_end(&mut buf)?; - Ok(buf) - }) - } - async fn read_all_async(self: Rc) -> FsResult> { - nonblocking(self, |file| { - let mut buf = Vec::new(); - file.read_to_end(&mut buf)?; - Ok(buf) - }) - .await - } - - fn chmod_sync(self: Rc, _mode: u32) -> FsResult<()> { - #[cfg(unix)] - { - sync(self, |file| { - use std::os::unix::prelude::PermissionsExt; - file.set_permissions(fs::Permissions::from_mode(_mode)) - }) - } - #[cfg(not(unix))] - Err(FsError::NotSupported) - } - - async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { - #[cfg(unix)] - { - nonblocking(self, move |file| { - use std::os::unix::prelude::PermissionsExt; - file.set_permissions(fs::Permissions::from_mode(_mode)) - }) - .await - } - #[cfg(not(unix))] - Err(FsError::NotSupported) - } - - fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult { - sync(self, |file| file.seek(pos)) - } - async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult { - nonblocking(self, move |file| file.seek(pos)).await - } - - fn datasync_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.sync_data()) - } - async fn datasync_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.sync_data()).await - } - - fn sync_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.sync_all()) - } - async fn sync_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.sync_all()).await - } - - fn stat_sync(self: Rc) -> FsResult { - sync(self, |file| file.metadata().map(metadata_to_fsstat)) - } - async fn stat_async(self: Rc) -> FsResult { - nonblocking(self, |file| file.metadata().map(metadata_to_fsstat)).await - } - - fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()> { - sync(self, |file| { - if exclusive { - file.lock_exclusive() - } else { - file.lock_shared() 
- } - }) - } - async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()> { - nonblocking(self, move |file| { - if exclusive { - file.lock_exclusive() - } else { - file.lock_shared() - } - }) - .await - } - - fn unlock_sync(self: Rc) -> FsResult<()> { - sync(self, |file| file.unlock()) - } - async fn unlock_async(self: Rc) -> FsResult<()> { - nonblocking(self, |file| file.unlock()).await - } - - fn truncate_sync(self: Rc, len: u64) -> FsResult<()> { - sync(self, |file| file.set_len(len)) - } - async fn truncate_async(self: Rc, len: u64) -> FsResult<()> { - nonblocking(self, move |file| file.set_len(len)).await - } - - fn utime_sync( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); - let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - sync(self, |file| { - filetime::set_file_handle_times(file, Some(atime), Some(mtime)) - }) - } - async fn utime_async( - self: Rc, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); - let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - nonblocking(self, move |file| { - filetime::set_file_handle_times(file, Some(atime), Some(mtime)) - }) - .await - } -} diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 55b2ccab66..fc9de711ff 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -14,7 +14,10 @@ description = "IO promitives for Deno extensions" path = "lib.rs" [dependencies] +async-trait.workspace = true deno_core.workspace = true +filetime.workspace = true +fs3.workspace = true once_cell.workspace = true tokio.workspace = true diff --git a/ext/io/fs.rs b/ext/io/fs.rs new file mode 100644 index 0000000000..bb6bdec4fc --- /dev/null +++ b/ext/io/fs.rs @@ -0,0 +1,330 @@ +// Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. + +use std::borrow::Cow; +use std::io; +use std::rc::Rc; +use std::time::SystemTime; +use std::time::UNIX_EPOCH; + +use deno_core::error::not_supported; +use deno_core::error::resource_unavailable; +use deno_core::error::AnyError; +use deno_core::BufMutView; +use deno_core::BufView; +use deno_core::OpState; +use deno_core::ResourceId; +use tokio::task::JoinError; + +pub enum FsError { + Io(io::Error), + FileBusy, + NotSupported, +} + +impl From for FsError { + fn from(err: io::Error) -> Self { + Self::Io(err) + } +} + +impl From for AnyError { + fn from(err: FsError) -> Self { + match err { + FsError::Io(err) => AnyError::from(err), + FsError::FileBusy => resource_unavailable(), + FsError::NotSupported => not_supported(), + } + } +} + +impl From for FsError { + fn from(err: JoinError) -> Self { + if err.is_cancelled() { + todo!("async tasks must not be cancelled") + } + if err.is_panic() { + std::panic::resume_unwind(err.into_panic()); // resume the panic on the main thread + } + unreachable!() + } +} + +pub type FsResult = Result; + +pub struct FsStat { + pub is_file: bool, + pub is_directory: bool, + pub is_symlink: bool, + pub size: u64, + + pub mtime: Option, + pub atime: Option, + pub birthtime: Option, + + pub dev: u64, + pub ino: u64, + pub mode: u32, + pub nlink: u64, + pub uid: u32, + pub gid: u32, + pub rdev: u64, + pub blksize: u64, + pub blocks: u64, +} + +impl FsStat { + pub fn from_std(metadata: std::fs::Metadata) -> Self { + macro_rules! 
unix_or_zero { + ($member:ident) => {{ + #[cfg(unix)] + { + use std::os::unix::fs::MetadataExt; + metadata.$member() + } + #[cfg(not(unix))] + { + 0 + } + }}; + } + + #[inline(always)] + fn to_msec(maybe_time: Result) -> Option { + match maybe_time { + Ok(time) => Some( + time + .duration_since(UNIX_EPOCH) + .map(|t| t.as_millis() as u64) + .unwrap_or_else(|err| err.duration().as_millis() as u64), + ), + Err(_) => None, + } + } + + Self { + is_file: metadata.is_file(), + is_directory: metadata.is_dir(), + is_symlink: metadata.file_type().is_symlink(), + size: metadata.len(), + + mtime: to_msec(metadata.modified()), + atime: to_msec(metadata.accessed()), + birthtime: to_msec(metadata.created()), + + dev: unix_or_zero!(dev), + ino: unix_or_zero!(ino), + mode: unix_or_zero!(mode), + nlink: unix_or_zero!(nlink), + uid: unix_or_zero!(uid), + gid: unix_or_zero!(gid), + rdev: unix_or_zero!(rdev), + blksize: unix_or_zero!(blksize), + blocks: unix_or_zero!(blocks), + } + } +} + +#[async_trait::async_trait(?Send)] +pub trait File { + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult; + async fn read(self: Rc, limit: usize) -> FsResult { + let vec = vec![0; limit]; + let buf = BufMutView::from(vec); + let (nread, buf) = self.read_byob(buf).await?; + let mut vec = buf.unwrap_vec(); + if vec.len() != nread { + vec.truncate(nread); + } + Ok(BufView::from(vec)) + } + async fn read_byob( + self: Rc, + buf: BufMutView, + ) -> FsResult<(usize, BufMutView)>; + + fn write_sync(self: Rc, buf: &[u8]) -> FsResult; + async fn write( + self: Rc, + buf: BufView, + ) -> FsResult; + + fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()>; + async fn write_all(self: Rc, buf: BufView) -> FsResult<()>; + + fn read_all_sync(self: Rc) -> FsResult>; + async fn read_all_async(self: Rc) -> FsResult>; + + fn chmod_sync(self: Rc, pathmode: u32) -> FsResult<()>; + async fn chmod_async(self: Rc, mode: u32) -> FsResult<()>; + + fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult; + async fn 
seek_async(self: Rc, pos: io::SeekFrom) -> FsResult; + + fn datasync_sync(self: Rc) -> FsResult<()>; + async fn datasync_async(self: Rc) -> FsResult<()>; + + fn sync_sync(self: Rc) -> FsResult<()>; + async fn sync_async(self: Rc) -> FsResult<()>; + + fn stat_sync(self: Rc) -> FsResult; + async fn stat_async(self: Rc) -> FsResult; + + fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()>; + async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()>; + + fn unlock_sync(self: Rc) -> FsResult<()>; + async fn unlock_async(self: Rc) -> FsResult<()>; + + fn truncate_sync(self: Rc, len: u64) -> FsResult<()>; + async fn truncate_async(self: Rc, len: u64) -> FsResult<()>; + + fn utime_sync( + self: Rc, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()>; + async fn utime_async( + self: Rc, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()>; + + // lower level functionality + fn as_stdio(self: Rc) -> FsResult; + #[cfg(unix)] + fn backing_fd(self: Rc) -> Option; + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option; + fn try_clone_inner(self: Rc) -> FsResult>; +} + +pub struct FileResource { + name: String, + file: Rc, +} + +impl FileResource { + pub fn new(file: Rc, name: String) -> Self { + Self { name, file } + } + + pub fn with_resource( + state: &OpState, + rid: ResourceId, + f: F, + ) -> Result + where + F: FnOnce(Rc) -> Result, + { + let resource = state.resource_table.get::(rid)?; + f(resource) + } + + pub fn get_file( + state: &OpState, + rid: ResourceId, + ) -> Result, AnyError> { + let resource = state.resource_table.get::(rid)?; + Ok(resource.file()) + } + + pub fn with_file( + state: &OpState, + rid: ResourceId, + f: F, + ) -> Result + where + F: FnOnce(Rc) -> Result, + { + Self::with_resource(state, rid, |r| f(r.file.clone())) + } + + pub fn file(&self) -> Rc { + self.file.clone() + } +} + +impl deno_core::Resource for FileResource { + fn name(&self) -> Cow { + 
Cow::Borrowed(&self.name) + } + + fn read( + self: Rc, + limit: usize, + ) -> deno_core::AsyncResult { + Box::pin(async move { + self + .file + .clone() + .read(limit) + .await + .map_err(|err| err.into()) + }) + } + + fn read_byob( + self: Rc, + buf: deno_core::BufMutView, + ) -> deno_core::AsyncResult<(usize, deno_core::BufMutView)> { + Box::pin(async move { + self + .file + .clone() + .read_byob(buf) + .await + .map_err(|err| err.into()) + }) + } + + fn write( + self: Rc, + buf: deno_core::BufView, + ) -> deno_core::AsyncResult { + Box::pin(async move { + self.file.clone().write(buf).await.map_err(|err| err.into()) + }) + } + + fn write_all( + self: Rc, + buf: deno_core::BufView, + ) -> deno_core::AsyncResult<()> { + Box::pin(async move { + self + .file + .clone() + .write_all(buf) + .await + .map_err(|err| err.into()) + }) + } + + fn read_byob_sync( + self: Rc, + data: &mut [u8], + ) -> Result { + self.file.clone().read_sync(data).map_err(|err| err.into()) + } + + fn write_sync( + self: Rc, + data: &[u8], + ) -> Result { + self.file.clone().write_sync(data).map_err(|err| err.into()) + } + + #[cfg(unix)] + fn backing_fd(self: Rc) -> Option { + self.file.clone().backing_fd() + } + + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + self.file.clone().backing_fd() + } +} diff --git a/ext/io/lib.rs b/ext/io/lib.rs index 73ce725780..49e4ab714f 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -1,6 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
-use deno_core::error::resource_unavailable; use deno_core::error::AnyError; use deno_core::op; use deno_core::AsyncMutFuture; @@ -13,8 +12,12 @@ use deno_core::CancelTryFuture; use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; -use deno_core::ResourceId; use deno_core::TaskQueue; +use fs::FileResource; +use fs::FsError; +use fs::FsResult; +use fs::FsStat; +use fs3::FileExt; use once_cell::sync::Lazy; use std::borrow::Cow; use std::cell::RefCell; @@ -22,6 +25,7 @@ use std::fs::File as StdFile; use std::io; use std::io::ErrorKind; use std::io::Read; +use std::io::Seek; use std::io::Write; use std::rc::Rc; use tokio::io::AsyncRead; @@ -40,6 +44,8 @@ use winapi::um::processenv::GetStdHandle; #[cfg(windows)] use winapi::um::winbase; +pub mod fs; + // Store the stdio fd/handles in global statics in order to keep them // alive for the duration of the application since the last handle/fd // being dropped will close the corresponding pipe. @@ -89,39 +95,39 @@ deno_core::extension!(deno_io, if let Some(stdio) = options.stdio { let t = &mut state.resource_table; - let rid = t.add(StdFileResource::stdio( - match stdio.stdin { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stdin, - file: STDIN_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(fs::FileResource::new( + Rc::new(match stdio.stdin { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stdin, + STDIN_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stdin", + }), + "stdin".to_string(), )); assert_eq!(rid, 0, "stdin must have ResourceId 0"); - let rid = t.add(StdFileResource::stdio( - match stdio.stdout { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stdout, - file: STDOUT_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(FileResource::new( + Rc::new(match stdio.stdout { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stdout, + 
STDOUT_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stdout", + }), + "stdout".to_string(), )); assert_eq!(rid, 1, "stdout must have ResourceId 1"); - let rid = t.add(StdFileResource::stdio( - match stdio.stderr { - StdioPipe::Inherit => StdFileResourceInner { - kind: StdFileResourceKind::Stderr, - file: STDERR_HANDLE.try_clone().unwrap(), - }, + let rid = t.add(FileResource::new( + Rc::new(match stdio.stderr { + StdioPipe::Inherit => StdFileResourceInner::new( + StdFileResourceKind::Stderr, + STDERR_HANDLE.try_clone().unwrap(), + ), StdioPipe::File(pipe) => StdFileResourceInner::file(pipe), - }, - "stderr", + }), + "stderr".to_string(), )); assert_eq!(rid, 2, "stderr must have ResourceId 2"); } @@ -291,150 +297,43 @@ enum StdFileResourceKind { Stderr, } -struct StdFileResourceInner { +pub struct StdFileResourceInner { kind: StdFileResourceKind, - file: StdFile, -} - -impl StdFileResourceInner { - pub fn file(fs_file: StdFile) -> Self { - StdFileResourceInner { - kind: StdFileResourceKind::File, - file: fs_file, - } - } - - pub fn with_file(&mut self, f: impl FnOnce(&mut StdFile) -> R) -> R { - f(&mut self.file) - } - - pub fn try_clone(&self) -> Result { - Ok(Self { - kind: self.kind, - file: self.file.try_clone()?, - }) - } - - pub fn write_and_maybe_flush( - &mut self, - buf: &[u8], - ) -> Result { - // Rust will line buffer and we don't want that behavior - // (see https://github.com/denoland/deno/issues/948), so flush stdout and stderr. - // Although an alternative solution could be to bypass Rust's std by - // using the raw fds/handles, it will cause encoding issues on Windows - // that we get solved for free by using Rust's stdio wrappers (see - // std/src/sys/windows/stdio.rs in Rust's source code). 
- match self.kind { - StdFileResourceKind::File => Ok(self.file.write(buf)?), - StdFileResourceKind::Stdin => { - Err(Into::::into(ErrorKind::Unsupported).into()) - } - StdFileResourceKind::Stdout => { - // bypass the file and use std::io::stdout() - let mut stdout = std::io::stdout().lock(); - let nwritten = stdout.write(buf)?; - stdout.flush()?; - Ok(nwritten) - } - StdFileResourceKind::Stderr => { - // bypass the file and use std::io::stderr() - let mut stderr = std::io::stderr().lock(); - let nwritten = stderr.write(buf)?; - stderr.flush()?; - Ok(nwritten) - } - } - } - - pub fn write_all_and_maybe_flush( - &mut self, - buf: &[u8], - ) -> Result<(), AnyError> { - // this method exists instead of using a `Write` implementation - // so that we can acquire the locks once and do both actions - match self.kind { - StdFileResourceKind::File => Ok(self.file.write_all(buf)?), - StdFileResourceKind::Stdin => { - Err(Into::::into(ErrorKind::Unsupported).into()) - } - StdFileResourceKind::Stdout => { - // bypass the file and use std::io::stdout() - let mut stdout = std::io::stdout().lock(); - stdout.write_all(buf)?; - stdout.flush()?; - Ok(()) - } - StdFileResourceKind::Stderr => { - // bypass the file and use std::io::stderr() - let mut stderr = std::io::stderr().lock(); - stderr.write_all(buf)?; - stderr.flush()?; - Ok(()) - } - } - } -} - -impl Read for StdFileResourceInner { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - match self.kind { - StdFileResourceKind::File | StdFileResourceKind::Stdin => { - self.file.read(buf) - } - StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { - Err(ErrorKind::Unsupported.into()) - } - } - } -} - -pub struct StdFileResource { - name: String, // We can't use an AsyncRefCell here because we need to allow // access to the resource synchronously at any time and // asynchronously one at a time in order - cell: RefCell>, + cell: RefCell>, // Used to keep async actions in order and only allow one // to occur at a 
time cell_async_task_queue: TaskQueue, } -impl StdFileResource { - fn stdio(inner: StdFileResourceInner, name: &str) -> Self { - Self { - cell: RefCell::new(Some(inner)), +impl StdFileResourceInner { + pub fn file(fs_file: StdFile) -> Self { + StdFileResourceInner::new(StdFileResourceKind::File, fs_file) + } + + fn new(kind: StdFileResourceKind, fs_file: StdFile) -> Self { + StdFileResourceInner { + kind, + cell: RefCell::new(Some(fs_file)), cell_async_task_queue: Default::default(), - name: name.to_string(), } } - pub fn fs_file(fs_file: StdFile) -> Self { - Self { - cell: RefCell::new(Some(StdFileResourceInner::file(fs_file))), - cell_async_task_queue: Default::default(), - name: "fsFile".to_string(), - } - } - - fn with_inner( - &self, - action: impl FnOnce(&mut StdFileResourceInner) -> Result, - ) -> Option> { - match self.cell.try_borrow_mut() { - Ok(mut cell) if cell.is_some() => { - let mut file = cell.take().unwrap(); - let result = action(&mut file); - cell.replace(file); - Some(result) - } - _ => None, - } - } - - async fn with_inner_blocking_task(&self, action: F) -> R + fn with_sync(&self, action: F) -> FsResult where - F: FnOnce(&mut StdFileResourceInner) -> R + Send + 'static, + F: FnOnce(&mut StdFile) -> FsResult, + { + match self.cell.try_borrow_mut() { + Ok(mut cell) if cell.is_some() => action(cell.as_mut().unwrap()), + _ => Err(fs::FsError::FileBusy), + } + } + + async fn with_inner_blocking_task(&self, action: F) -> R + where + F: FnOnce(&mut StdFile) -> R + Send + 'static, { // we want to restrict this to one async action at a time let _permit = self.cell_async_task_queue.acquire().await; @@ -443,9 +342,9 @@ impl StdFileResource { let mut did_take = false; let mut cell_value = { let mut cell = self.cell.borrow_mut(); - match cell.as_mut().unwrap().try_clone() { - Ok(value) => value, - Err(_) => { + match cell.as_mut().unwrap().try_clone().ok() { + Some(value) => value, + None => { did_take = true; cell.take().unwrap() } @@ -466,200 +365,369 @@ 
impl StdFileResource { result } - async fn read_byob( - self: Rc, - mut buf: BufMutView, - ) -> Result<(usize, BufMutView), AnyError> { - self - .with_inner_blocking_task(move |inner| { - let nread = inner.read(&mut buf)?; - Ok((nread, buf)) - }) - .await + async fn with_blocking_task(&self, action: F) -> R + where + F: FnOnce() -> R + Send + 'static, + { + // we want to restrict this to one async action at a time + let _permit = self.cell_async_task_queue.acquire().await; + + tokio::task::spawn_blocking(action).await.unwrap() + } +} + +#[async_trait::async_trait(?Send)] +impl crate::fs::File for StdFileResourceInner { + fn write_sync(self: Rc, buf: &[u8]) -> FsResult { + // Rust will line buffer and we don't want that behavior + // (see https://github.com/denoland/deno/issues/948), so flush stdout and stderr. + // Although an alternative solution could be to bypass Rust's std by + // using the raw fds/handles, it will cause encoding issues on Windows + // that we get solved for free by using Rust's stdio wrappers (see + // std/src/sys/windows/stdio.rs in Rust's source code). 
+ match self.kind { + StdFileResourceKind::File => self.with_sync(|file| Ok(file.write(buf)?)), + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + let nwritten = stdout.write(buf)?; + stdout.flush()?; + Ok(nwritten) + } + StdFileResourceKind::Stderr => { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + let nwritten = stderr.write(buf)?; + stderr.flush()?; + Ok(nwritten) + } + } + } + + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { + match self.kind { + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + self.with_sync(|file| Ok(file.read(buf)?)) + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { + Err(FsError::NotSupported) + } + } + } + + fn write_all_sync(self: Rc, buf: &[u8]) -> FsResult<()> { + match self.kind { + StdFileResourceKind::File => { + self.with_sync(|file| Ok(file.write_all(buf)?)) + } + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + stdout.write_all(buf)?; + stdout.flush()?; + Ok(()) + } + StdFileResourceKind::Stderr => { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + stderr.write_all(buf)?; + stderr.flush()?; + Ok(()) + } + } + } + async fn write_all(self: Rc, buf: BufView) -> FsResult<()> { + match self.kind { + StdFileResourceKind::File => { + self + .with_inner_blocking_task(move |file| Ok(file.write_all(&buf)?)) + .await + } + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + self + .with_blocking_task(move || { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + 
stdout.write_all(&buf)?; + stdout.flush()?; + Ok(()) + }) + .await + } + StdFileResourceKind::Stderr => { + self + .with_blocking_task(move || { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + stderr.write_all(&buf)?; + stderr.flush()?; + Ok(()) + }) + .await + } + } } async fn write( self: Rc, view: BufView, - ) -> Result { - self - .with_inner_blocking_task(move |inner| { - let nwritten = inner.write_and_maybe_flush(&view)?; - Ok(deno_core::WriteOutcome::Partial { nwritten, view }) - }) - .await - } - - async fn write_all(self: Rc, view: BufView) -> Result<(), AnyError> { - self - .with_inner_blocking_task(move |inner| { - inner.write_all_and_maybe_flush(&view) - }) - .await - } - - fn read_byob_sync(self: Rc, buf: &mut [u8]) -> Result { - self - .with_inner(|inner| inner.read(buf)) - .ok_or_else(resource_unavailable)? - .map_err(Into::into) - } - - fn write_sync(self: Rc, data: &[u8]) -> Result { - self - .with_inner(|inner| inner.write_and_maybe_flush(data)) - .ok_or_else(resource_unavailable)? - } - - fn with_resource( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(Rc) -> Result, - { - let resource = state.resource_table.get::(rid)?; - f(resource) - } - - pub fn with_file( - state: &mut OpState, - rid: ResourceId, - f: F, - ) -> Result - where - F: FnOnce(&mut StdFile) -> Result, - { - Self::with_resource(state, rid, move |resource| { - resource - .with_inner(move |inner| inner.with_file(f)) - .ok_or_else(resource_unavailable)? 
- }) - } - - pub fn with_file2(self: Rc, f: F) -> Option> - where - F: FnOnce(&mut StdFile) -> Result, - { - self.with_inner(move |inner| inner.with_file(f)) - } - - pub async fn with_file_blocking_task( - state: Rc>, - rid: ResourceId, - f: F, - ) -> Result - where - F: (FnOnce(&mut StdFile) -> Result) + Send + 'static, - { - let resource = state - .borrow_mut() - .resource_table - .get::(rid)?; - - resource - .with_inner_blocking_task(move |inner| inner.with_file(f)) - .await - } - - pub async fn with_file_blocking_task2( - self: Rc, - f: F, - ) -> Result - where - F: (FnOnce(&mut StdFile) -> Result) + Send + 'static, - { - self - .with_inner_blocking_task(move |inner| inner.with_file(f)) - .await - } - - pub fn clone_file( - state: &mut OpState, - rid: ResourceId, - ) -> Result { - Self::with_file(state, rid, move |std_file| { - std_file.try_clone().map_err(AnyError::from) - }) - } - - pub fn as_stdio( - state: &mut OpState, - rid: u32, - ) -> Result { - Self::with_resource(state, rid, |resource| { - resource - .with_inner(|inner| match inner.kind { - StdFileResourceKind::File => { - let file = inner.file.try_clone()?; - Ok(file.into()) - } - _ => Ok(std::process::Stdio::inherit()), - }) - .ok_or_else(resource_unavailable)? 
- }) - } -} - -impl Resource for StdFileResource { - fn name(&self) -> Cow { - self.name.as_str().into() - } - - fn read(self: Rc, limit: usize) -> AsyncResult { - Box::pin(async move { - let vec = vec![0; limit]; - let buf = BufMutView::from(vec); - let (nread, buf) = StdFileResource::read_byob(self, buf).await?; - let mut vec = buf.unwrap_vec(); - if vec.len() != nread { - vec.truncate(nread); + ) -> FsResult { + match self.kind { + StdFileResourceKind::File => { + self + .with_inner_blocking_task(|file| { + let nwritten = file.write(&view)?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await } - Ok(BufView::from(vec)) + StdFileResourceKind::Stdin => { + Err(Into::::into(ErrorKind::Unsupported).into()) + } + StdFileResourceKind::Stdout => { + self + .with_blocking_task(|| { + // bypass the file and use std::io::stdout() + let mut stdout = std::io::stdout().lock(); + let nwritten = stdout.write(&view)?; + stdout.flush()?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await + } + StdFileResourceKind::Stderr => { + self + .with_blocking_task(|| { + // bypass the file and use std::io::stderr() + let mut stderr = std::io::stderr().lock(); + let nwritten = stderr.write(&view)?; + stderr.flush()?; + Ok(deno_core::WriteOutcome::Partial { nwritten, view }) + }) + .await + } + } + } + + fn read_all_sync(self: Rc) -> FsResult> { + match self.kind { + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + let mut buf = Vec::new(); + self.with_sync(|file| Ok(file.read_to_end(&mut buf)?))?; + Ok(buf) + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => { + Err(FsError::NotSupported) + } + } + } + async fn read_all_async(self: Rc) -> FsResult> { + match self.kind { + StdFileResourceKind::File | StdFileResourceKind::Stdin => { + self + .with_inner_blocking_task(|file| { + let mut buf = Vec::new(); + file.read_to_end(&mut buf)?; + Ok(buf) + }) + .await + } + StdFileResourceKind::Stdout | StdFileResourceKind::Stderr => 
{ + Err(FsError::NotSupported) + } + } + } + + fn chmod_sync(self: Rc, _mode: u32) -> FsResult<()> { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + self.with_sync(|file| { + Ok(file.set_permissions(std::fs::Permissions::from_mode(_mode))?) + }) + } + #[cfg(not(unix))] + Err(FsError::NotSupported) + } + async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + self + .with_inner_blocking_task(move |file| { + Ok(file.set_permissions(std::fs::Permissions::from_mode(_mode))?) + }) + .await + } + #[cfg(not(unix))] + Err(FsError::NotSupported) + } + + fn seek_sync(self: Rc, pos: io::SeekFrom) -> FsResult { + self.with_sync(|file| Ok(file.seek(pos)?)) + } + async fn seek_async(self: Rc, pos: io::SeekFrom) -> FsResult { + self + .with_inner_blocking_task(move |file| Ok(file.seek(pos)?)) + .await + } + + fn datasync_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.sync_data()?)) + } + async fn datasync_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.sync_data()?)) + .await + } + + fn sync_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.sync_all()?)) + } + async fn sync_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.sync_all()?)) + .await + } + + fn stat_sync(self: Rc) -> FsResult { + self.with_sync(|file| Ok(file.metadata().map(FsStat::from_std)?)) + } + async fn stat_async(self: Rc) -> FsResult { + self + .with_inner_blocking_task(|file| { + Ok(file.metadata().map(FsStat::from_std)?) 
+ }) + .await + } + + fn lock_sync(self: Rc, exclusive: bool) -> FsResult<()> { + self.with_sync(|file| { + if exclusive { + file.lock_exclusive()?; + } else { + file.lock_shared()?; + } + Ok(()) }) } - - fn read_byob( - self: Rc, - buf: deno_core::BufMutView, - ) -> AsyncResult<(usize, deno_core::BufMutView)> { - Box::pin(StdFileResource::read_byob(self, buf)) + async fn lock_async(self: Rc, exclusive: bool) -> FsResult<()> { + self + .with_inner_blocking_task(move |file| { + if exclusive { + file.lock_exclusive()?; + } else { + file.lock_shared()?; + } + Ok(()) + }) + .await } - fn write( - self: Rc, - view: deno_core::BufView, - ) -> AsyncResult { - Box::pin(StdFileResource::write(self, view)) + fn unlock_sync(self: Rc) -> FsResult<()> { + self.with_sync(|file| Ok(file.unlock()?)) + } + async fn unlock_async(self: Rc) -> FsResult<()> { + self + .with_inner_blocking_task(|file| Ok(file.unlock()?)) + .await } - fn write_all(self: Rc, view: deno_core::BufView) -> AsyncResult<()> { - Box::pin(StdFileResource::write_all(self, view)) + fn truncate_sync(self: Rc, len: u64) -> FsResult<()> { + self.with_sync(|file| Ok(file.set_len(len)?)) + } + async fn truncate_async(self: Rc, len: u64) -> FsResult<()> { + self + .with_inner_blocking_task(move |file| Ok(file.set_len(len)?)) + .await } - fn write_sync( + fn utime_sync( self: Rc, - data: &[u8], - ) -> Result { - StdFileResource::write_sync(self, data) + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); + let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); + + self.with_sync(|file| { + filetime::set_file_handle_times(file, Some(atime), Some(mtime))?; + Ok(()) + }) + } + async fn utime_async( + self: Rc, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); + let 
mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); + + self + .with_inner_blocking_task(move |file| { + filetime::set_file_handle_times(file, Some(atime), Some(mtime))?; + Ok(()) + }) + .await } - fn read_byob_sync( + async fn read_byob( self: Rc, - data: &mut [u8], - ) -> Result { - StdFileResource::read_byob_sync(self, data) + mut buf: BufMutView, + ) -> FsResult<(usize, BufMutView)> { + self + .with_inner_blocking_task(|file| { + let nread = file.read(&mut buf)?; + Ok((nread, buf)) + }) + .await + } + + fn try_clone_inner(self: Rc) -> FsResult> { + let inner: &Option<_> = &self.cell.borrow(); + match inner { + Some(inner) => Ok(Rc::new(StdFileResourceInner { + kind: self.kind, + cell: RefCell::new(Some(inner.try_clone()?)), + cell_async_task_queue: Default::default(), + })), + None => Err(FsError::FileBusy), + } + } + + fn as_stdio(self: Rc) -> FsResult { + match self.kind { + StdFileResourceKind::File => self.with_sync(|file| { + let file = file.try_clone()?; + Ok(file.into()) + }), + _ => Ok(std::process::Stdio::inherit()), + } } #[cfg(unix)] fn backing_fd(self: Rc) -> Option { use std::os::unix::io::AsRawFd; - self - .with_inner(move |std_file| { - Ok::<_, ()>(std_file.with_file(|f| f.as_raw_fd())) - })? - .ok() + self.with_sync(|file| Ok(file.as_raw_fd())).ok() + } + + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + use std::os::windows::prelude::AsRawHandle; + self.with_sync(|file| Ok(file.as_raw_handle())).ok() } } @@ -671,12 +739,7 @@ pub fn op_print( is_err: bool, ) -> Result<(), AnyError> { let rid = if is_err { 2 } else { 1 }; - StdFileResource::with_resource(state, rid, move |resource| { - resource - .with_inner(|inner| { - inner.write_all_and_maybe_flush(msg.as_bytes())?; - Ok(()) - }) - .ok_or_else(resource_unavailable)? + FileResource::with_file(state, rid, move |file| { + Ok(file.write_all_sync(msg.as_bytes())?) 
}) } diff --git a/runtime/build.rs b/runtime/build.rs index bba2eae551..d096df7db1 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -18,7 +18,6 @@ mod startup_snapshot { use deno_core::Extension; use deno_core::ExtensionFileSource; use deno_core::ModuleCode; - use deno_fs::StdFs; use std::path::Path; fn transpile_ts_for_snapshotting( @@ -310,7 +309,10 @@ mod startup_snapshot { deno_napi::deno_napi::init_ops_and_esm::(), deno_http::deno_http::init_ops_and_esm(), deno_io::deno_io::init_ops_and_esm(Default::default()), - deno_fs::deno_fs::init_ops_and_esm::<_, Permissions>(false, StdFs), + deno_fs::deno_fs::init_ops_and_esm::( + false, + std::sync::Arc::new(deno_fs::RealFs), + ), runtime::init_ops_and_esm(), // FIXME(bartlomieju): these extensions are specified last, because they // depend on `runtime`, even though it should be other way around diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index cf8740255d..d991c961f2 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -12,10 +12,10 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ZeroCopyBuf; +use deno_io::fs::FileResource; use deno_io::ChildStderrResource; use deno_io::ChildStdinResource; use deno_io::ChildStdoutResource; -use deno_io::StdFileResource; use serde::Deserialize; use serde::Serialize; use std::borrow::Cow; @@ -93,7 +93,9 @@ impl StdioOrRid { ) -> Result { match &self { StdioOrRid::Stdio(val) => Ok(val.as_stdio()), - StdioOrRid::Rid(rid) => StdFileResource::as_stdio(state, *rid), + StdioOrRid::Rid(rid) => { + FileResource::with_file(state, *rid, |file| Ok(file.as_stdio()?)) + } } } } diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index a3dc03a6fa..7f24daec4b 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -1,10 +1,14 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::io::Error; +use std::rc::Rc; + +use deno_core::error::resource_unavailable; use deno_core::error::AnyError; use deno_core::op; use deno_core::OpState; -use deno_io::StdFileResource; -use std::io::Error; +use deno_core::Resource; +use deno_io::fs::FileResource; #[cfg(unix)] use deno_core::ResourceId; @@ -14,8 +18,6 @@ use nix::sys::termios; use std::cell::RefCell; #[cfg(unix)] use std::collections::HashMap; -#[cfg(unix)] -use std::rc::Rc; #[cfg(unix)] #[derive(Default, Clone)] @@ -44,13 +46,14 @@ use winapi::shared::minwindef::DWORD; use winapi::um::wincon; #[cfg(windows)] -fn get_windows_handle( - f: &std::fs::File, +fn get_fd_from_resource( + resource: Rc, ) -> Result { - use std::os::windows::io::AsRawHandle; use winapi::um::handleapi; - let handle = f.as_raw_handle(); + let Some(handle) = resource.backing_fd() else { + return Err(resource_unavailable()); + }; if handle == handleapi::INVALID_HANDLE_VALUE { return Err(Error::last_os_error().into()); } else if handle.is_null() { @@ -59,6 +62,16 @@ fn get_windows_handle( Ok(handle) } +#[cfg(not(windows))] +fn get_fd_from_resource( + resource: Rc, +) -> Result { + match resource.backing_fd() { + Some(fd) => Ok(fd), + None => Err(resource_unavailable()), + } +} + deno_core::extension!( deno_tty, ops = [op_stdin_set_raw, op_isatty, op_console_size], @@ -106,23 +119,15 @@ fn op_stdin_set_raw( // Copyright (c) 2019 Timon. MIT license. 
#[cfg(windows)] { - use std::os::windows::io::AsRawHandle; use winapi::shared::minwindef::FALSE; use winapi::um::consoleapi; - use winapi::um::handleapi; if cbreak { return Err(deno_core::error::not_supported()); } - StdFileResource::with_file(state, rid, move |std_file| { - let handle = std_file.as_raw_handle(); - - if handle == handleapi::INVALID_HANDLE_VALUE { - return Err(Error::last_os_error().into()); - } else if handle.is_null() { - return Err(custom_error("ReferenceError", "null handle")); - } + FileResource::with_resource(state, rid, move |resource| { + let handle = get_fd_from_resource(resource)?; let mut original_mode: DWORD = 0; // SAFETY: winapi call if unsafe { consoleapi::GetConsoleMode(handle, &mut original_mode) } @@ -147,13 +152,11 @@ fn op_stdin_set_raw( } #[cfg(unix)] { - use std::os::unix::io::AsRawFd; - let tty_mode_store = state.borrow::().clone(); let previous_mode = tty_mode_store.get(rid); - StdFileResource::with_file(state, rid, move |std_file| { - let raw_fd = std_file.as_raw_fd(); + FileResource::with_resource(state, rid, move |resource| { + let raw_fd = get_fd_from_resource(resource)?; if is_raw { let mut raw = match previous_mode { @@ -201,13 +204,14 @@ fn op_isatty( rid: u32, out: &mut [u8], ) -> Result<(), AnyError> { - StdFileResource::with_file(state, rid, move |std_file| { + FileResource::with_resource(state, rid, move |resource| { + let raw_fd = get_fd_from_resource(resource)?; #[cfg(windows)] { use winapi::shared::minwindef::FALSE; use winapi::um::consoleapi; - let handle = get_windows_handle(std_file)?; + let handle = raw_fd; let mut test_mode: DWORD = 0; // If I cannot get mode out of console, it is not a console. 
// TODO(bartlomieju): @@ -220,8 +224,6 @@ fn op_isatty( } #[cfg(unix)] { - use std::os::unix::io::AsRawFd; - let raw_fd = std_file.as_raw_fd(); // TODO(bartlomieju): #[allow(clippy::undocumented_unsafe_blocks)] { @@ -242,8 +244,9 @@ fn op_console_size( result: &mut [u32], rid: u32, ) -> Result<(), AnyError> { - StdFileResource::with_file(state, rid, move |std_file| { - let size = console_size(std_file)?; + FileResource::with_resource(state, rid, move |resource| { + let fd = get_fd_from_resource(resource)?; + let size = console_size_from_fd(fd)?; result[0] = size.cols; result[1] = size.rows; Ok(()) @@ -276,40 +279,50 @@ pub fn console_size( { use std::os::windows::io::AsRawHandle; let handle = std_file.as_raw_handle(); - - // SAFETY: winapi calls - unsafe { - let mut bufinfo: winapi::um::wincon::CONSOLE_SCREEN_BUFFER_INFO = - std::mem::zeroed(); - - if winapi::um::wincon::GetConsoleScreenBufferInfo(handle, &mut bufinfo) - == 0 - { - return Err(Error::last_os_error()); - } - Ok(ConsoleSize { - cols: bufinfo.dwSize.X as u32, - rows: bufinfo.dwSize.Y as u32, - }) - } + console_size_from_fd(handle) } - #[cfg(unix)] { use std::os::unix::io::AsRawFd; - let fd = std_file.as_raw_fd(); - // SAFETY: libc calls - unsafe { - let mut size: libc::winsize = std::mem::zeroed(); - if libc::ioctl(fd, libc::TIOCGWINSZ, &mut size as *mut _) != 0 { - return Err(Error::last_os_error()); - } - Ok(ConsoleSize { - cols: size.ws_col as u32, - rows: size.ws_row as u32, - }) + console_size_from_fd(fd) + } +} + +#[cfg(windows)] +fn console_size_from_fd( + handle: std::os::windows::io::RawHandle, +) -> Result { + // SAFETY: winapi calls + unsafe { + let mut bufinfo: winapi::um::wincon::CONSOLE_SCREEN_BUFFER_INFO = + std::mem::zeroed(); + + if winapi::um::wincon::GetConsoleScreenBufferInfo(handle, &mut bufinfo) == 0 + { + return Err(Error::last_os_error()); } + Ok(ConsoleSize { + cols: bufinfo.dwSize.X as u32, + rows: bufinfo.dwSize.Y as u32, + }) + } +} + +#[cfg(not(windows))] +fn 
console_size_from_fd( + fd: std::os::unix::prelude::RawFd, +) -> Result { + // SAFETY: libc calls + unsafe { + let mut size: libc::winsize = std::mem::zeroed(); + if libc::ioctl(fd, libc::TIOCGWINSZ, &mut size as *mut _) != 0 { + return Err(Error::last_os_error()); + } + Ok(ConsoleSize { + cols: size.ws_col as u32, + rows: size.ws_row as u32, + }) } } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 1b3dd28096..e485c0c35b 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -34,7 +34,7 @@ use deno_core::RuntimeOptions; use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; -use deno_fs::StdFs; +use deno_fs::FileSystem; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; use deno_tls::RootCertStoreProvider; @@ -331,6 +331,7 @@ pub struct WebWorkerOptions { pub unsafely_ignore_certificate_errors: Option>, pub root_cert_store_provider: Option>, pub seed: Option, + pub fs: Arc, pub module_loader: Rc, pub node_fs: Option>, pub npm_resolver: Option>, @@ -441,7 +442,7 @@ impl WebWorker { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), + deno_fs::deno_fs::init_ops::(unstable, options.fs), deno_node::deno_node::init_ops::( options.npm_resolver, options.node_fs, diff --git a/runtime/worker.rs b/runtime/worker.rs index ac67011f0d..b9db217804 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -30,7 +30,7 @@ use deno_core::RuntimeOptions; use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; -use deno_fs::StdFs; +use deno_fs::FileSystem; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; use deno_tls::RootCertStoreProvider; @@ -87,6 +87,7 @@ pub struct WorkerOptions { pub root_cert_store_provider: Option>, pub seed: Option, + pub fs: Arc, /// Implementation of `ModuleLoader` which will be /// called when 
V8 requests to load ES modules. /// @@ -149,6 +150,7 @@ impl Default for WorkerOptions { create_web_worker_cb: Arc::new(|_| { unimplemented!("web workers are not supported") }), + fs: Arc::new(deno_fs::RealFs), module_loader: Rc::new(FsModuleLoader), seed: None, unsafely_ignore_certificate_errors: Default::default(), @@ -266,7 +268,7 @@ impl MainWorker { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(unstable, StdFs), + deno_fs::deno_fs::init_ops::(unstable, options.fs), deno_node::deno_node::init_ops::( options.npm_resolver, options.node_fs, From a6c47ee74023f6ef683988cabc8caa95406e3c99 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 5 May 2023 12:44:24 -0400 Subject: [PATCH 123/320] refactor(ext/node): combine `deno_node::Fs` with `deno_fs::FileSystem` (#18991) --- Cargo.lock | 1 + cli/build.rs | 8 ++-- cli/factory.rs | 27 +++++--------- cli/lsp/language_server.rs | 6 +-- cli/npm/resolvers/local.rs | 8 ++-- cli/npm/resolvers/mod.rs | 4 +- cli/standalone/mod.rs | 10 ++--- cli/worker.rs | 5 --- ext/fs/interface.rs | 24 +++++++++++- ext/fs/std_fs.rs | 2 +- ext/io/fs.rs | 10 +++++ ext/node/Cargo.toml | 1 + ext/node/analyze.rs | 6 +-- ext/node/clippy.toml | 76 +++++++++++++++++++------------------- ext/node/lib.rs | 70 +---------------------------------- ext/node/ops/require.rs | 22 +++++------ ext/node/package_json.rs | 7 ++-- ext/node/resolution.rs | 17 +++++---- runtime/build.rs | 8 ++-- runtime/web_worker.rs | 8 ++-- runtime/worker.rs | 9 +++-- 21 files changed, 142 insertions(+), 187 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ad816c9a2d..c07936bd1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1154,6 +1154,7 @@ dependencies = [ "cbc", "data-encoding", "deno_core", + "deno_fs", "deno_media_type", "deno_npm", "deno_semver", diff --git a/cli/build.rs b/cli/build.rs index 6cedb53cef..8e6b670e29 100644 --- a/cli/build.rs 
+++ b/cli/build.rs @@ -326,6 +326,7 @@ deno_core::extension!( fn create_cli_snapshot(snapshot_path: PathBuf) { // NOTE(bartlomieju): ordering is important here, keep it in sync with // `runtime/worker.rs`, `runtime/web_worker.rs` and `runtime/build.rs`! + let fs = Arc::new(deno_fs::RealFs); let extensions: Vec = vec![ deno_webidl::deno_webidl::init_ops(), deno_console::deno_console::init_ops(), @@ -360,11 +361,8 @@ fn create_cli_snapshot(snapshot_path: PathBuf) { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Default::default()), - deno_fs::deno_fs::init_ops::( - false, - Arc::new(deno_fs::RealFs), - ), - deno_node::deno_node::init_ops::(None, None), + deno_fs::deno_fs::init_ops::(false, fs.clone()), + deno_node::deno_node::init_ops::(None, fs), cli::init_ops_and_esm(), // NOTE: This needs to be init_ops_and_esm! ]; diff --git a/cli/factory.rs b/cli/factory.rs index 295794a51b..3bc5ef9e29 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -43,7 +43,6 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_runtime::deno_fs; -use deno_runtime::deno_node; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::RootCertStoreProvider; @@ -133,6 +132,7 @@ struct CliFactoryServices { http_client: Deferred>, emit_cache: Deferred, emitter: Deferred>, + fs: Deferred>, graph_container: Deferred>, lockfile: Deferred>>>, maybe_import_map: Deferred>>, @@ -146,7 +146,6 @@ struct CliFactoryServices { module_graph_builder: Deferred>, module_load_preparer: Deferred>, node_code_translator: Deferred>, - node_fs: Deferred>, node_resolver: Deferred>, npm_api: Deferred>, npm_cache: Deferred>, @@ -245,6 +244,10 @@ impl CliFactory { }) } + pub fn fs(&self) -> &Arc { + self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) + } + pub fn maybe_lockfile(&self) -> &Option>> { self .services @@ -292,13 +295,6 @@ impl CliFactory { .await } - pub fn 
node_fs(&self) -> &Arc { - self - .services - .node_fs - .get_or_init(|| Arc::new(deno_node::RealFs)) - } - pub async fn npm_resolver(&self) -> Result<&Arc, AnyError> { self .services @@ -306,7 +302,7 @@ impl CliFactory { .get_or_try_init_async(async { let npm_resolution = self.npm_resolution().await?; let npm_fs_resolver = create_npm_fs_resolver( - self.node_fs().clone(), + self.fs().clone(), self.npm_cache()?.clone(), self.text_only_progress_bar(), CliNpmRegistryApi::default_url().to_owned(), @@ -437,7 +433,7 @@ impl CliFactory { .node_resolver .get_or_try_init_async(async { Ok(Arc::new(NodeResolver::new( - self.node_fs().clone(), + self.fs().clone(), self.npm_resolver().await?.clone(), ))) }) @@ -458,7 +454,7 @@ impl CliFactory { Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, - self.node_fs().clone(), + self.fs().clone(), self.node_resolver().await?.clone(), self.npm_resolver().await?.clone(), ))) @@ -554,8 +550,7 @@ impl CliFactory { let node_code_translator = self.node_code_translator().await?.clone(); let options = self.cli_options().clone(); let main_worker_options = self.create_cli_main_worker_options()?; - let fs = Arc::new(deno_fs::RealFs); - let node_fs = self.node_fs().clone(); + let fs = self.fs().clone(); let root_cert_store_provider = self.root_cert_store_provider().clone(); let node_resolver = self.node_resolver().await?.clone(); let npm_resolver = self.npm_resolver().await?.clone(); @@ -582,7 +577,6 @@ impl CliFactory { )), root_cert_store_provider.clone(), fs.clone(), - node_fs.clone(), maybe_inspector_server.clone(), main_worker_options.clone(), ) @@ -613,8 +607,7 @@ impl CliFactory { ), )), self.root_cert_store_provider().clone(), - Arc::new(deno_fs::RealFs), - self.node_fs().clone(), + self.fs().clone(), self.maybe_inspector_server().clone(), self.create_cli_main_worker_options()?, )) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 83657a8ef4..d00b8f3138 100644 --- a/cli/lsp/language_server.rs +++ 
b/cli/lsp/language_server.rs @@ -9,7 +9,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::ModuleSpecifier; -use deno_runtime::deno_node; +use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -458,7 +458,7 @@ fn create_lsp_structs( let resolution = Arc::new(NpmResolution::from_serialized(api.clone(), None, None)); let fs_resolver = create_npm_fs_resolver( - Arc::new(deno_node::RealFs), + Arc::new(deno_fs::RealFs), npm_cache.clone(), &progress_bar, registry_url.clone(), @@ -709,7 +709,7 @@ impl Inner { self.npm_resolution.snapshot(), None, )); - let node_fs = Arc::new(deno_node::RealFs); + let node_fs = Arc::new(deno_fs::RealFs); let npm_resolver = Arc::new(CliNpmResolver::new( npm_resolution.clone(), create_npm_fs_resolver( diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index b4cf5af275..038d9eea1f 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -23,7 +23,7 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_runtime::deno_core::futures; -use deno_runtime::deno_node::NodeFs; +use deno_runtime::deno_fs; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::PackageJson; @@ -44,7 +44,7 @@ use super::common::NpmPackageFsResolver; /// and resolves packages from it. 
#[derive(Debug)] pub struct LocalNpmPackageResolver { - fs: Arc, + fs: Arc, cache: Arc, progress_bar: ProgressBar, resolution: Arc, @@ -55,7 +55,7 @@ pub struct LocalNpmPackageResolver { impl LocalNpmPackageResolver { pub fn new( - fs: Arc, + fs: Arc, cache: Arc, progress_bar: ProgressBar, registry_url: Url, @@ -94,7 +94,7 @@ impl LocalNpmPackageResolver { // Canonicalize the path so it's not pointing to the symlinked directory // in `node_modules` directory of the referrer. Some(path) => { - Ok(deno_core::strip_unc_prefix(self.fs.canonicalize(&path)?)) + Ok(deno_core::strip_unc_prefix(self.fs.realpath_sync(&path)?)) } None => bail!("could not find npm package for '{}'", specifier), } diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 60402bd270..86d3840f31 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -18,7 +18,7 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::resolution::PackageReqNotFoundError; use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; -use deno_runtime::deno_node; +use deno_runtime::deno_fs; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NpmResolver; @@ -270,7 +270,7 @@ impl NpmResolver for CliNpmResolver { } pub fn create_npm_fs_resolver( - fs: Arc, + fs: Arc, cache: Arc, progress_bar: &ProgressBar, registry_url: Url, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 0f65db679e..e00ab8ab25 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -31,7 +31,6 @@ use deno_core::ModuleType; use deno_core::ResolutionKind; use deno_graph::source::Resolver; use deno_runtime::deno_fs; -use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; @@ -208,11 +207,11 @@ pub async fn run( http_client.clone(), progress_bar.clone(), )); - let node_fs = 
Arc::new(deno_node::RealFs); + let fs = Arc::new(deno_fs::RealFs); let npm_resolution = Arc::new(NpmResolution::from_serialized(npm_api.clone(), None, None)); let npm_fs_resolver = create_npm_fs_resolver( - node_fs.clone(), + fs.clone(), npm_cache, &progress_bar, npm_registry_url, @@ -225,7 +224,7 @@ pub async fn run( None, )); let node_resolver = - Arc::new(NodeResolver::new(node_fs.clone(), npm_resolver.clone())); + Arc::new(NodeResolver::new(fs.clone(), npm_resolver.clone())); let module_loader_factory = StandaloneModuleLoaderFactory { loader: EmbeddedModuleLoader { eszip: Arc::new(eszip), @@ -254,8 +253,7 @@ pub async fn run( BlobStore::default(), Box::new(module_loader_factory), root_cert_store_provider, - Arc::new(deno_fs::RealFs), - node_fs, + fs, None, CliMainWorkerOptions { argv: metadata.argv, diff --git a/cli/worker.rs b/cli/worker.rs index 5216af2638..4d8e500b7a 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -99,7 +99,6 @@ struct SharedWorkerState { module_loader_factory: Box, root_cert_store_provider: Arc, fs: Arc, - node_fs: Arc, maybe_inspector_server: Option>, } @@ -311,7 +310,6 @@ impl CliMainWorkerFactory { module_loader_factory: Box, root_cert_store_provider: Arc, fs: Arc, - node_fs: Arc, maybe_inspector_server: Option>, options: CliMainWorkerOptions, ) -> Self { @@ -329,7 +327,6 @@ impl CliMainWorkerFactory { module_loader_factory, root_cert_store_provider, fs, - node_fs, maybe_inspector_server, }), } @@ -450,7 +447,6 @@ impl CliMainWorkerFactory { should_wait_for_inspector_session: shared.options.inspect_wait, module_loader, fs: shared.fs.clone(), - node_fs: Some(shared.node_fs.clone()), npm_resolver: Some(shared.npm_resolver.clone()), get_error_class_fn: Some(&errors::get_error_class_name), cache_storage_dir, @@ -576,7 +572,6 @@ fn create_web_worker_callback( source_map_getter: maybe_source_map_getter, module_loader, fs: shared.fs.clone(), - node_fs: Some(shared.node_fs.clone()), npm_resolver: Some(shared.npm_resolver.clone()), worker_type: 
args.worker_type, maybe_inspector_server, diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 1847b59828..474089153e 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -73,7 +73,7 @@ pub struct FsDirEntry { } #[async_trait::async_trait(?Send)] -pub trait FileSystem: Send + Sync { +pub trait FileSystem: std::fmt::Debug + Send + Sync { fn cwd(&self) -> FsResult; fn tmp_dir(&self) -> FsResult; fn chdir(&self, path: &Path) -> FsResult<()>; @@ -225,4 +225,26 @@ pub trait FileSystem: Send + Sync { let buf = file.read_all_async().await?; Ok(buf) } + + fn is_file(&self, path: &Path) -> bool { + self.stat_sync(path).map(|m| m.is_file).unwrap_or(false) + } + + fn is_dir(&self, path: &Path) -> bool { + self + .stat_sync(path) + .map(|m| m.is_directory) + .unwrap_or(false) + } + + fn exists(&self, path: &Path) -> bool { + self.stat_sync(path).is_ok() + } + + fn read_to_string(&self, path: &Path) -> FsResult { + let buf = self.read_file_sync(path)?; + String::from_utf8(buf).map_err(|err| { + std::io::Error::new(std::io::ErrorKind::InvalidData, err).into() + }) + } } diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index a657939db2..fe6910f1b2 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -22,7 +22,7 @@ use crate::OpenOptions; #[cfg(not(unix))] use deno_io::fs::FsError; -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct RealFs; #[async_trait::async_trait(?Send)] diff --git a/ext/io/fs.rs b/ext/io/fs.rs index bb6bdec4fc..a333e1dd5a 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -21,6 +21,16 @@ pub enum FsError { NotSupported, } +impl FsError { + pub fn kind(&self) -> io::ErrorKind { + match self { + Self::Io(err) => err.kind(), + Self::FileBusy => io::ErrorKind::Other, + Self::NotSupported => io::ErrorKind::Other, + } + } +} + impl From for FsError { fn from(err: io::Error) -> Self { Self::Io(err) diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 38c8474dce..6a897a9a18 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -18,6 
+18,7 @@ aes.workspace = true cbc.workspace = true data-encoding = "2.3.3" deno_core.workspace = true +deno_fs.workspace = true deno_media_type.workspace = true deno_npm.workspace = true deno_semver.workspace = true diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index 2622ce8dab..bad0906c5b 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -13,7 +13,6 @@ use once_cell::sync::Lazy; use deno_core::error::AnyError; -use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; @@ -67,7 +66,7 @@ pub trait CjsEsmCodeAnalyzer { pub struct NodeCodeTranslator { cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - fs: Arc, + fs: Arc, node_resolver: Arc, npm_resolver: Arc, } @@ -77,7 +76,7 @@ impl { pub fn new( cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - fs: Arc, + fs: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { @@ -161,6 +160,7 @@ impl let reexport_file_text = self .fs .read_to_string(&resolved_reexport) + .map_err(AnyError::from) .with_context(|| { format!( "Could not find '{}' ({}) referenced from {}", diff --git a/ext/node/clippy.toml b/ext/node/clippy.toml index 94796f5a70..31d9d7d472 100644 --- a/ext/node/clippy.toml +++ b/ext/node/clippy.toml @@ -1,40 +1,40 @@ disallowed-methods = [ - { path = "std::env::current_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::metadata", reason = "File system 
operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::canonicalize", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::copy", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::create_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = 
"std::fs::create_dir_all", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::hard_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_link", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::read_to_string", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::remove_dir", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::remove_dir_all", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::remove_file", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::rename", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::set_permissions", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, - { path = "std::fs::write", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::env::current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be 
done using FileSystem trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::canonicalize", reason = "File system operations 
should be done using FileSystem trait" }, + { path = "std::fs::copy", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::rename", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::write", reason = "File system operations should be done using FileSystem trait" }, ] diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 128f3a2fea..03ec730d84 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -14,7 +14,6 @@ use deno_semver::npm::NpmPackageReq; use 
deno_semver::npm::NpmPackageReqReference; use once_cell::sync::Lazy; use std::collections::HashSet; -use std::io; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; @@ -51,71 +50,6 @@ impl NodePermissions for AllowAllNodePermissions { } } -#[derive(Default, Clone)] -pub struct NodeFsMetadata { - pub is_file: bool, - pub is_dir: bool, -} - -pub trait NodeFs: std::fmt::Debug + Send + Sync { - fn current_dir(&self) -> io::Result; - fn metadata(&self, path: &Path) -> io::Result; - fn is_file(&self, path: &Path) -> bool; - fn is_dir(&self, path: &Path) -> bool; - fn exists(&self, path: &Path) -> bool; - fn read_to_string(&self, path: &Path) -> io::Result; - fn canonicalize(&self, path: &Path) -> io::Result; -} - -#[derive(Debug)] -pub struct RealFs; - -impl NodeFs for RealFs { - fn current_dir(&self) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::env::current_dir() - } - - fn metadata(&self, path: &Path) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).map(|metadata| { - // on most systems, calling is_file() and is_dir() is cheap - // and returns information already found in the metadata object - NodeFsMetadata { - is_file: metadata.is_file(), - is_dir: metadata.is_dir(), - } - }) - } - - fn exists(&self, path: &Path) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).is_ok() - } - - fn is_file(&self, path: &Path) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path) - .map(|m| m.is_file()) - .unwrap_or(false) - } - - fn is_dir(&self, path: &Path) -> bool { - #[allow(clippy::disallowed_methods)] - std::fs::metadata(path).map(|m| m.is_dir()).unwrap_or(false) - } - - fn read_to_string(&self, path: &Path) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::fs::read_to_string(path) - } - - fn canonicalize(&self, path: &Path) -> io::Result { - #[allow(clippy::disallowed_methods)] - std::path::Path::canonicalize(path) - } -} - pub trait NpmResolver: std::fmt::Debug 
+ Send + Sync { /// Resolves an npm package folder path from an npm package referrer. fn resolve_package_folder_from_package( @@ -516,10 +450,10 @@ deno_core::extension!(deno_node, ], options = { maybe_npm_resolver: Option>, - fs: Option>, + fs: Arc, }, state = |state, options| { - let fs = options.fs.unwrap_or_else(|| Arc::new(RealFs)); + let fs = options.fs; state.put(fs.clone()); if let Some(npm_resolver) = options.maybe_npm_resolver { state.put(npm_resolver.clone()); diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 4a2b97187a..972815995a 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -16,7 +16,6 @@ use std::rc::Rc; use std::sync::Arc; use crate::resolution; -use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; @@ -94,11 +93,11 @@ pub fn op_require_node_module_paths

    ( where P: NodePermissions + 'static, { - let fs = state.borrow::>(); + let fs = state.borrow::>(); // Guarantee that "from" is absolute. let from = deno_core::resolve_path( &from, - &(fs.current_dir()).context("Unable to get CWD")?, + &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?, ) .unwrap() .to_file_path() @@ -263,8 +262,8 @@ where { let path = PathBuf::from(path); ensure_read_permission::

    (state, &path)?; - let fs = state.borrow::>(); - if let Ok(metadata) = fs.metadata(&path) { + let fs = state.borrow::>(); + if let Ok(metadata) = fs.stat_sync(&path) { if metadata.is_file { return Ok(0); } else { @@ -285,8 +284,9 @@ where { let path = PathBuf::from(request); ensure_read_permission::

    (state, &path)?; - let fs = state.borrow::>(); - let canonicalized_path = deno_core::strip_unc_prefix(fs.canonicalize(&path)?); + let fs = state.borrow::>(); + let canonicalized_path = + deno_core::strip_unc_prefix(fs.realpath_sync(&path)?); Ok(canonicalized_path.to_string_lossy().to_string()) } @@ -346,8 +346,8 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - let fs = state.borrow::>(); - if let Ok(cwd) = fs.current_dir() { + let fs = state.borrow::>(); + if let Ok(cwd) = fs.cwd() { ensure_read_permission::

    (state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); } @@ -429,7 +429,7 @@ where { let file_path = PathBuf::from(file_path); ensure_read_permission::

    (state, &file_path)?; - let fs = state.borrow::>(); + let fs = state.borrow::>(); Ok(fs.read_to_string(&file_path)?) } @@ -457,7 +457,7 @@ fn op_require_resolve_exports

    ( where P: NodePermissions + 'static, { - let fs = state.borrow::>(); + let fs = state.borrow::>(); let npm_resolver = state.borrow::>(); let node_resolver = state.borrow::>(); let permissions = state.borrow::

    (); diff --git a/ext/node/package_json.rs b/ext/node/package_json.rs index 940e326312..95ca8b5618 100644 --- a/ext/node/package_json.rs +++ b/ext/node/package_json.rs @@ -1,6 +1,5 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -use crate::NodeFs; use crate::NodeModuleKind; use crate::NodePermissions; @@ -63,7 +62,7 @@ impl PackageJson { } pub fn load( - fs: &dyn NodeFs, + fs: &dyn deno_fs::FileSystem, resolver: &dyn NpmResolver, permissions: &dyn NodePermissions, path: PathBuf, @@ -73,7 +72,7 @@ impl PackageJson { } pub fn load_skip_read_permission( - fs: &dyn NodeFs, + fs: &dyn deno_fs::FileSystem, path: PathBuf, ) -> Result { assert!(path.is_absolute()); @@ -90,7 +89,7 @@ impl PackageJson { Err(err) => bail!( "Error loading package.json at {}. {:#}", path.display(), - err + AnyError::from(err), ), }; diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 046c774fa2..71b988c194 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -19,7 +19,6 @@ use deno_semver::npm::NpmPackageReqReference; use crate::errors; use crate::AllowAllNodePermissions; -use crate::NodeFs; use crate::NodePermissions; use crate::NpmResolver; use crate::PackageJson; @@ -107,12 +106,15 @@ impl NodeResolution { #[derive(Debug)] pub struct NodeResolver { - fs: Arc, + fs: Arc, npm_resolver: Arc, } impl NodeResolver { - pub fn new(fs: Arc, npm_resolver: Arc) -> Self { + pub fn new( + fs: Arc, + npm_resolver: Arc, + ) -> Self { Self { fs, npm_resolver } } @@ -280,8 +282,9 @@ impl NodeResolver { p_str.to_string() }; - let (is_dir, is_file) = if let Ok(stats) = self.fs.metadata(Path::new(&p)) { - (stats.is_dir, stats.is_file) + let (is_dir, is_file) = if let Ok(stats) = self.fs.stat_sync(Path::new(&p)) + { + (stats.is_directory, stats.is_file) } else { (false, false) }; @@ -491,7 +494,7 @@ impl NodeResolver { referrer_kind: NodeModuleKind, ) -> Option { fn probe_extensions( - fs: &dyn NodeFs, + fs: &dyn deno_fs::FileSystem, path: &Path, 
referrer_kind: NodeModuleKind, ) -> Option { @@ -1079,7 +1082,7 @@ impl NodeResolver { ) -> Result { let file_path = url.to_file_path().unwrap(); let current_dir = deno_core::strip_unc_prefix( - self.fs.canonicalize(file_path.parent().unwrap())?, + self.fs.realpath_sync(file_path.parent().unwrap())?, ); let mut current_dir = current_dir.as_path(); let package_json_path = current_dir.join("package.json"); diff --git a/runtime/build.rs b/runtime/build.rs index d096df7db1..4f49ba6816 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -273,6 +273,7 @@ mod startup_snapshot { pub fn create_runtime_snapshot(snapshot_path: PathBuf) { // NOTE(bartlomieju): ordering is important here, keep it in sync with // `runtime/worker.rs`, `runtime/web_worker.rs` and `cli/build.rs`! + let fs = std::sync::Arc::new(deno_fs::RealFs); let extensions: Vec = vec![ deno_webidl::deno_webidl::init_ops_and_esm(), deno_console::deno_console::init_ops_and_esm(), @@ -309,14 +310,11 @@ mod startup_snapshot { deno_napi::deno_napi::init_ops_and_esm::(), deno_http::deno_http::init_ops_and_esm(), deno_io::deno_io::init_ops_and_esm(Default::default()), - deno_fs::deno_fs::init_ops_and_esm::( - false, - std::sync::Arc::new(deno_fs::RealFs), - ), + deno_fs::deno_fs::init_ops_and_esm::(false, fs.clone()), runtime::init_ops_and_esm(), // FIXME(bartlomieju): these extensions are specified last, because they // depend on `runtime`, even though it should be other way around - deno_node::deno_node::init_ops_and_esm::(None, None), + deno_node::deno_node::init_ops_and_esm::(None, fs), #[cfg(not(feature = "snapshot_from_snapshot"))] runtime_main::init_ops_and_esm(), ]; diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index e485c0c35b..6487239f8b 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -333,7 +333,6 @@ pub struct WebWorkerOptions { pub seed: Option, pub fs: Arc, pub module_loader: Rc, - pub node_fs: Option>, pub npm_resolver: Option>, pub create_web_worker_cb: Arc, pub 
preload_module_cb: Arc, @@ -442,10 +441,13 @@ impl WebWorker { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::(unstable, options.fs), + deno_fs::deno_fs::init_ops::( + unstable, + options.fs.clone(), + ), deno_node::deno_node::init_ops::( options.npm_resolver, - options.node_fs, + options.fs, ), // Runtime ops that are always initialized for WebWorkers ops::web_worker::deno_web_worker::init_ops(), diff --git a/runtime/worker.rs b/runtime/worker.rs index b9db217804..77f16553b6 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -94,7 +94,6 @@ pub struct WorkerOptions { /// If not provided runtime will error if code being /// executed tries to load modules. pub module_loader: Rc, - pub node_fs: Option>, pub npm_resolver: Option>, // Callbacks invoked when creating new instance of WebWorker pub create_web_worker_cb: Arc, @@ -166,7 +165,6 @@ impl Default for WorkerOptions { broadcast_channel: Default::default(), source_map_getter: Default::default(), root_cert_store_provider: Default::default(), - node_fs: Default::default(), npm_resolver: Default::default(), blob_store: Default::default(), extensions: Default::default(), @@ -268,10 +266,13 @@ impl MainWorker { deno_napi::deno_napi::init_ops::(), deno_http::deno_http::init_ops(), deno_io::deno_io::init_ops(Some(options.stdio)), - deno_fs::deno_fs::init_ops::(unstable, options.fs), + deno_fs::deno_fs::init_ops::( + unstable, + options.fs.clone(), + ), deno_node::deno_node::init_ops::( options.npm_resolver, - options.node_fs, + options.fs, ), // Ops from this crate ops::runtime::deno_runtime::init_ops(main_module.clone()), From b8c936076162e77039063126ee882d995f2c45de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 6 May 2023 16:01:05 +0200 Subject: [PATCH 124/320] refactor(core): remove run_in_task helper, use tokio::test macro (#19009) --- core/modules.rs | 68 
+++++++++++++++++++++----------------------- core/runtime.rs | 75 ++++++++++++++++++++++++++----------------------- 2 files changed, 72 insertions(+), 71 deletions(-) diff --git a/core/modules.rs b/core/modules.rs index bc795de5cf..9352301ba8 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -1740,6 +1740,7 @@ mod tests { use crate::RuntimeOptions; use crate::Snapshot; use deno_ops::op; + use futures::future::poll_fn; use futures::future::FutureExt; use parking_lot::Mutex; use std::fmt; @@ -1754,12 +1755,6 @@ mod tests { pub use crate::*; } - // TODO(ry) Sadly FuturesUnordered requires the current task to be set. So - // even though we are only using poll() in these tests and not Tokio, we must - // nevertheless run it in the tokio executor. Ideally run_in_task can be - // removed in the future. - use crate::runtime::tests::run_in_task; - #[derive(Default)] struct MockLoader { pub loads: Arc>>, @@ -1907,7 +1902,7 @@ import "/a.js"; } if inner.url == "file:///slow.js" && inner.counter < 2 { // TODO(ry) Hopefully in the future we can remove current task - // notification. See comment above run_in_task. + // notification. cx.waker().wake_by_ref(); return Poll::Pending; } @@ -2263,8 +2258,8 @@ import "/a.js"; futures::executor::block_on(receiver).unwrap().unwrap(); } - #[test] - fn dyn_import_err() { + #[tokio::test] + async fn dyn_import_err() { #[derive(Clone, Default)] struct DynImportErrLoader { pub count: Arc, @@ -2302,7 +2297,7 @@ import "/a.js"; }); // Test an erroneous dynamic import where the specified module isn't found. 
- run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "file:///dyn_import2.js", @@ -2320,7 +2315,9 @@ import "/a.js"; unreachable!(); } assert_eq!(count.load(Ordering::Relaxed), 4); + Poll::Ready(()) }) + .await; } #[derive(Clone, Default)] @@ -2369,8 +2366,8 @@ import "/a.js"; } } - #[test] - fn dyn_import_ok() { + #[tokio::test] + async fn dyn_import_ok() { let loader = Rc::new(DynImportOkLoader::default()); let prepare_load_count = loader.prepare_load_count.clone(); let resolve_count = loader.resolve_count.clone(); @@ -2379,7 +2376,7 @@ import "/a.js"; module_loader: Some(loader), ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { // Dynamically import mod_b runtime .execute_script_static( @@ -2413,11 +2410,13 @@ import "/a.js"; )); assert_eq!(resolve_count.load(Ordering::Relaxed), 7); assert_eq!(load_count.load(Ordering::Relaxed), 1); + Poll::Ready(()) }) + .await; } - #[test] - fn dyn_import_borrow_mut_error() { + #[tokio::test] + async fn dyn_import_borrow_mut_error() { // https://github.com/denoland/deno/issues/6054 let loader = Rc::new(DynImportOkLoader::default()); let prepare_load_count = loader.prepare_load_count.clone(); @@ -2426,7 +2425,7 @@ import "/a.js"; ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "file:///dyn_import3.js", @@ -2445,7 +2444,9 @@ import "/a.js"; assert_eq!(prepare_load_count.load(Ordering::Relaxed), 1); // Second poll triggers error let _ = runtime.poll_event_loop(cx, false); + Poll::Ready(()) }) + .await; } // Regression test for https://github.com/denoland/deno/issues/3736. 
@@ -2671,8 +2672,8 @@ import "/a.js"; futures::executor::block_on(fut); } - #[test] - fn slow_never_ready_modules() { + #[tokio::test] + async fn slow_never_ready_modules() { let loader = MockLoader::new(); let loads = loader.loads.clone(); let mut runtime = JsRuntime::new(RuntimeOptions { @@ -2680,7 +2681,7 @@ import "/a.js"; ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { let spec = resolve_url("file:///main.js").unwrap(); let mut recursive_load = runtime.load_main_module(&spec, None).boxed_local(); @@ -2694,8 +2695,7 @@ import "/a.js"; // "file:///never_ready.js", // "file:///slow.js" // But due to current task notification in DelayedSourceCodeFuture they - // all get loaded in a single poll. Also see the comment above - // run_in_task. + // all get loaded in a single poll. for _ in 0..10 { let result = recursive_load.poll_unpin(cx); @@ -2714,30 +2714,26 @@ import "/a.js"; ] ); } + Poll::Ready(()) }) + .await; } - #[test] - fn loader_disappears_after_error() { + #[tokio::test] + async fn loader_disappears_after_error() { let loader = MockLoader::new(); let mut runtime = JsRuntime::new(RuntimeOptions { module_loader: Some(loader), ..Default::default() }); - run_in_task(move |cx| { - let spec = resolve_url("file:///bad_import.js").unwrap(); - let mut load_fut = runtime.load_main_module(&spec, None).boxed_local(); - let result = load_fut.poll_unpin(cx); - if let Poll::Ready(Err(err)) = result { - assert_eq!( - err.downcast_ref::().unwrap(), - &MockError::ResolveErr - ); - } else { - unreachable!(); - } - }) + let spec = resolve_url("file:///bad_import.js").unwrap(); + let result = runtime.load_main_module(&spec, None).await; + let err = result.unwrap_err(); + assert_eq!( + err.downcast_ref::().unwrap(), + &MockError::ResolveErr + ); } #[test] diff --git a/core/runtime.rs b/core/runtime.rs index 1cbefb6fe9..8c78be55b5 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -2610,8 +2610,6 @@ pub mod tests { use crate::modules::SymbolicModule; 
use crate::ZeroCopyBuf; use deno_ops::op; - use futures::future::lazy; - use std::ops::FnOnce; use std::pin::Pin; use std::rc::Rc; use std::sync::atomic::AtomicUsize; @@ -2623,13 +2621,6 @@ pub mod tests { pub use crate::*; } - pub fn run_in_task(f: F) - where - F: FnOnce(&mut Context) + 'static, - { - futures::executor::block_on(lazy(move |cx| f(cx))); - } - #[derive(Copy, Clone)] pub enum Mode { Async, @@ -2864,7 +2855,7 @@ pub mod tests { #[tokio::test] async fn test_poll_value() { let mut runtime = JsRuntime::new(Default::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { let value_global = runtime .execute_script_static("a.js", "Promise.resolve(1 + 2)") .unwrap(); @@ -2903,7 +2894,8 @@ pub mod tests { .unwrap(); let v = runtime.poll_value(&value_global, cx); matches!(v, Poll::Ready(Err(e)) if e.to_string() == "Promise resolution is still pending but the event loop has already resolved."); - }); + Poll::Ready(()) + }).await; } #[tokio::test] @@ -3061,10 +3053,10 @@ pub mod tests { assert_eq!(frame.column_number, Some(12)); } - #[test] - fn test_encode_decode() { + #[tokio::test] + async fn test_encode_decode() { let (mut runtime, _dispatch_count) = setup(Mode::Async); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script( "encode_decode_test.js", @@ -3075,13 +3067,15 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } - }); + Poll::Ready(()) + }) + .await; } - #[test] - fn test_serialize_deserialize() { + #[tokio::test] + async fn test_serialize_deserialize() { let (mut runtime, _dispatch_count) = setup(Mode::Async); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script( "serialize_deserialize_test.js", @@ -3091,11 +3085,13 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } - }); + Poll::Ready(()) + }) + .await; } - #[test] - fn test_error_builder() { + #[tokio::test] + async fn test_error_builder() { #[op] fn 
op_err() -> Result<(), Error> { Err(custom_error("DOMExceptionOperationError", "abc")) @@ -3111,7 +3107,7 @@ pub mod tests { get_error_class_fn: Some(&get_error_class_name), ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "error_builder_test.js", @@ -3121,7 +3117,9 @@ pub mod tests { if let Poll::Ready(Err(_)) = runtime.poll_event_loop(cx, false) { unreachable!(); } - }); + Poll::Ready(()) + }) + .await; } #[test] @@ -3646,10 +3644,10 @@ main(); assert_eq!(result.unwrap_err().to_string(), expected_error); } - #[test] - fn test_error_async_stack() { + #[tokio::test] + async fn test_error_async_stack() { let mut runtime = JsRuntime::new(RuntimeOptions::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "error_async_stack.js", @@ -3680,11 +3678,13 @@ main(); } _ => panic!(), }; + Poll::Ready(()) }) + .await; } - #[test] - fn test_error_context() { + #[tokio::test] + async fn test_error_context() { use anyhow::anyhow; #[op] @@ -3703,7 +3703,7 @@ main(); ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "test_error_context_sync.js", @@ -3746,13 +3746,14 @@ if (errMessage !== "higher-level sync error: original sync error") { Poll::Ready(Err(err)) => panic!("{err:?}"), _ => panic!(), } - }) + Poll::Ready(()) + }).await; } - #[test] - fn test_pump_message_loop() { + #[tokio::test] + async fn test_pump_message_loop() { let mut runtime = JsRuntime::new(RuntimeOptions::default()); - run_in_task(move |cx| { + poll_fn(move |cx| { runtime .execute_script_static( "pump_message_loop.js", @@ -3797,7 +3798,9 @@ assertEquals(1, notify_return_value); r#"assertEquals(globalThis.resolved, true);"#, ) .unwrap(); + Poll::Ready(()) }) + .await; } #[test] @@ -4695,7 +4698,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { ..Default::default() }); - run_in_task(move |cx| { + poll_fn(move |cx| { let main_realm = 
runtime.global_realm(); let other_realm = runtime.create_realm().unwrap(); @@ -4747,7 +4750,9 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { runtime.poll_event_loop(cx, false), Poll::Ready(Ok(())) )); - }); + Poll::Ready(()) + }) + .await; } #[test] From 0536ae86588328b773f82d9724cd816a86217583 Mon Sep 17 00:00:00 2001 From: Aapo Alasuutari Date: Sun, 7 May 2023 13:31:01 +0300 Subject: [PATCH 125/320] fix(ext/ffi): UnsafeCallback can hang with 'deno test' (#19018) --- ext/ffi/00_ffi.js | 2 +- ext/ffi/callback.rs | 82 +++++++++++++++++++++++++++++---------------- ext/ffi/lib.rs | 8 ++--- 3 files changed, 57 insertions(+), 35 deletions(-) diff --git a/ext/ffi/00_ffi.js b/ext/ffi/00_ffi.js index 2091a55b38..67cb13ab6d 100644 --- a/ext/ffi/00_ffi.js +++ b/ext/ffi/00_ffi.js @@ -426,7 +426,7 @@ class UnsafeCallback { close() { this.#refcount = 0; - core.close(this.#rid); + ops.op_ffi_unsafe_callback_close(this.#rid); } } diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index bd4d6a5454..ef613b3ede 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -6,7 +6,6 @@ use crate::FfiPermissions; use crate::FfiState; use crate::ForeignFunction; use crate::PendingFfiAsyncWork; -use crate::LOCAL_ISOLATE_POINTER; use crate::MAX_SAFE_INTEGER; use crate::MIN_SAFE_INTEGER; use deno_core::error::AnyError; @@ -30,9 +29,18 @@ use std::pin::Pin; use std::ptr; use std::ptr::NonNull; use std::rc::Rc; +use std::sync::atomic; +use std::sync::atomic::AtomicU32; use std::sync::mpsc::sync_channel; use std::task::Poll; use std::task::Waker; + +static THREAD_ID_COUNTER: AtomicU32 = AtomicU32::new(1); + +thread_local! { + static LOCAL_THREAD_ID: RefCell = RefCell::new(0); +} + #[derive(Clone)] pub struct PtrSymbol { pub cif: libffi::middle::Cif, @@ -81,26 +89,16 @@ impl Resource for UnsafeCallbackResource { fn close(self: Rc) { self.cancel.cancel(); - // SAFETY: This drops the closure and the callback info associated with it. 
- // Any retained function pointers to the closure become dangling pointers. - // It is up to the user to know that it is safe to call the `close()` on the - // UnsafeCallback instance. - unsafe { - let info = Box::from_raw(self.info); - let isolate = info.isolate.as_mut().unwrap(); - let _ = v8::Global::from_raw(isolate, info.callback); - let _ = v8::Global::from_raw(isolate, info.context); - } } } struct CallbackInfo { - pub parameters: Vec, - pub result: NativeType, pub async_work_sender: mpsc::UnboundedSender, pub callback: NonNull, pub context: NonNull, - pub isolate: *mut v8::Isolate, + pub parameters: Vec, + pub result: NativeType, + pub thread_id: u32, pub waker: Option, } @@ -122,8 +120,8 @@ unsafe extern "C" fn deno_ffi_callback( args: *const *const c_void, info: &CallbackInfo, ) { - LOCAL_ISOLATE_POINTER.with(|s| { - if ptr::eq(*s.borrow(), info.isolate) { + LOCAL_THREAD_ID.with(|s| { + if *s.borrow() == info.thread_id { // Own isolate thread, okay to call directly do_ffi_callback(cif, info, result, args); } else { @@ -155,9 +153,6 @@ unsafe fn do_ffi_callback( ) { let callback: NonNull = info.callback; let context: NonNull = info.context; - let isolate: *mut v8::Isolate = info.isolate; - let isolate = &mut *isolate; - let callback = v8::Global::from_raw(isolate, callback); let context = std::mem::transmute::< NonNull, v8::Local, @@ -174,7 +169,10 @@ unsafe fn do_ffi_callback( // refer the same `let bool_value`. 
let mut cb_scope = v8::CallbackScope::new(context); let scope = &mut v8::HandleScope::new(&mut cb_scope); - let func = callback.open(scope); + let func = std::mem::transmute::< + NonNull, + v8::Local, + >(callback); let result = result as *mut c_void; let vals: &[*const c_void] = std::slice::from_raw_parts(args, info.parameters.len()); @@ -267,7 +265,6 @@ unsafe fn do_ffi_callback( let recv = v8::undefined(scope); let call_result = func.call(scope, recv.into(), ¶ms); - std::mem::forget(callback); if call_result.is_none() { // JS function threw an exception. Set the return value to zero and return. @@ -555,13 +552,21 @@ where let v8_value = cb.v8_value; let cb = v8::Local::::try_from(v8_value)?; - let isolate: *mut v8::Isolate = &mut *scope as &mut v8::Isolate; - LOCAL_ISOLATE_POINTER.with(|s| { - if s.borrow().is_null() { - s.replace(isolate); + let thread_id: u32 = LOCAL_THREAD_ID.with(|s| { + let value = *s.borrow(); + if value == 0 { + let res = THREAD_ID_COUNTER.fetch_add(1, atomic::Ordering::SeqCst); + s.replace(res); + res + } else { + value } }); + if thread_id == 0 { + panic!("Isolate ID counter overflowed u32"); + } + let async_work_sender = state.borrow_mut::().async_work_sender.clone(); let callback = v8::Global::new(scope, cb).into_raw(); @@ -569,12 +574,12 @@ where let context = v8::Global::new(scope, current_context).into_raw(); let info: *mut CallbackInfo = Box::leak(Box::new(CallbackInfo { - parameters: args.parameters.clone(), - result: args.result.clone(), async_work_sender, callback, context, - isolate, + parameters: args.parameters.clone(), + result: args.result.clone(), + thread_id, waker: None, })); let cif = Cif::new( @@ -607,3 +612,24 @@ where Ok(array_value.into()) } + +#[op(v8)] +pub fn op_ffi_unsafe_callback_close( + state: &mut OpState, + scope: &mut v8::HandleScope, + rid: ResourceId, +) -> Result<(), AnyError> { + // SAFETY: This drops the closure and the callback info associated with it. 
+ // Any retained function pointers to the closure become dangling pointers. + // It is up to the user to know that it is safe to call the `close()` on the + // UnsafeCallback instance. + unsafe { + let callback_resource = + state.resource_table.take::(rid)?; + let info = Box::from_raw(callback_resource.info); + let _ = v8::Global::from_raw(scope, info.callback); + let _ = v8::Global::from_raw(scope, info.context); + callback_resource.close(); + } + Ok(()) +} diff --git a/ext/ffi/lib.rs b/ext/ffi/lib.rs index c11f08dd8e..ccad69d738 100644 --- a/ext/ffi/lib.rs +++ b/ext/ffi/lib.rs @@ -2,7 +2,6 @@ use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; -use deno_core::v8; use deno_core::OpState; use std::cell::RefCell; @@ -10,7 +9,6 @@ use std::mem::size_of; use std::os::raw::c_char; use std::os::raw::c_short; use std::path::Path; -use std::ptr; use std::rc::Rc; mod call; @@ -25,6 +23,7 @@ mod turbocall; use call::op_ffi_call_nonblocking; use call::op_ffi_call_ptr; use call::op_ffi_call_ptr_nonblocking; +use callback::op_ffi_unsafe_callback_close; use callback::op_ffi_unsafe_callback_create; use callback::op_ffi_unsafe_callback_ref; use dlfcn::op_ffi_load; @@ -43,10 +42,6 @@ const _: () = { assert!(size_of::<*const ()>() == 8); }; -thread_local! { - static LOCAL_ISOLATE_POINTER: RefCell<*const v8::Isolate> = RefCell::new(ptr::null()); -} - pub(crate) const MAX_SAFE_INTEGER: isize = 9007199254740991; pub(crate) const MIN_SAFE_INTEGER: isize = -9007199254740991; @@ -109,6 +104,7 @@ deno_core::extension!(deno_ffi, op_ffi_read_f64

    , op_ffi_read_ptr

    , op_ffi_unsafe_callback_create

    , + op_ffi_unsafe_callback_close, op_ffi_unsafe_callback_ref, ], esm = [ "00_ffi.js" ], From 1de1a265fff438d9e299eec9ac2cedd47acde451 Mon Sep 17 00:00:00 2001 From: Aapo Alasuutari Date: Sun, 7 May 2023 17:27:16 +0300 Subject: [PATCH 126/320] fix(ext/ffi): Callbacks panic on returning isize (#19022) --- ext/ffi/callback.rs | 51 +++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index ef613b3ede..d1abd0c704 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -320,17 +320,6 @@ unsafe fn do_ffi_callback( }; *(result as *mut bool) = value; } - NativeType::I32 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { - value.value() as i32 - } else { - // Fallthrough, probably UB. - value - .int32_value(scope) - .expect("Unable to deserialize result parameter.") - }; - *(result as *mut i32) = value; - } NativeType::F32 => { let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as f32 @@ -392,7 +381,7 @@ unsafe fn do_ffi_callback( *(result as *mut *mut c_void) = pointer; } NativeType::I8 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as i8 } else { // Fallthrough, essentially UB. @@ -403,7 +392,7 @@ unsafe fn do_ffi_callback( *(result as *mut i8) = value; } NativeType::U8 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as u8 } else { // Fallthrough, essentially UB. @@ -414,7 +403,7 @@ unsafe fn do_ffi_callback( *(result as *mut u8) = value; } NativeType::I16 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as i16 } else { // Fallthrough, essentially UB. 
@@ -425,7 +414,7 @@ unsafe fn do_ffi_callback( *(result as *mut i16) = value; } NativeType::U16 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { + let value = if let Ok(value) = v8::Local::::try_from(value) { value.value() as u16 } else { // Fallthrough, essentially UB. @@ -435,9 +424,20 @@ unsafe fn do_ffi_callback( }; *(result as *mut u16) = value; } + NativeType::I32 => { + let value = if let Ok(value) = v8::Local::::try_from(value) { + value.value() + } else { + // Fallthrough, essentially UB. + value + .int32_value(scope) + .expect("Unable to deserialize result parameter.") + }; + *(result as *mut i32) = value; + } NativeType::U32 => { - let value = if let Ok(value) = v8::Local::::try_from(value) { - value.value() as u32 + let value = if let Ok(value) = v8::Local::::try_from(value) { + value.value() } else { // Fallthrough, essentially UB. value @@ -446,21 +446,25 @@ unsafe fn do_ffi_callback( }; *(result as *mut u32) = value; } - NativeType::I64 => { + NativeType::I64 | NativeType::ISize => { if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut i64) = value.i64_value().0; - } else if let Ok(value) = v8::Local::::try_from(value) { - *(result as *mut i64) = value.value(); + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut i64) = value.value() as i64; + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut i64) = value.value() as i64; } else { *(result as *mut i64) = value .integer_value(scope) .expect("Unable to deserialize result parameter."); } } - NativeType::U64 => { + NativeType::U64 | NativeType::USize => { if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut u64) = value.u64_value().0; - } else if let Ok(value) = v8::Local::::try_from(value) { + } else if let Ok(value) = v8::Local::::try_from(value) { + *(result as *mut u64) = value.value() as u64; + } else if let Ok(value) = v8::Local::::try_from(value) { *(result as *mut u64) = value.value() as u64; } else 
{ *(result as *mut u64) = value @@ -501,9 +505,6 @@ unsafe fn do_ffi_callback( NativeType::Void => { // nop } - _ => { - unreachable!(); - } }; } From 7e1ae655720de72fd555bb1746bb35f5d17f39f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 7 May 2023 22:43:16 +0200 Subject: [PATCH 127/320] chore: lint ext/fs/std_fs.rs (#19036) --- ext/fs/std_fs.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index fe6910f1b2..6ac935bbd2 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -434,7 +434,7 @@ fn copy_file(from: &Path, to: &Path) -> FsResult<()> { // Do a regular copy. fcopyfile() is an overkill for < 128KB // files. let mut buf = [0u8; 128 * 1024]; - let mut from_file = fs::File::open(&from)?; + let mut from_file = fs::File::open(from)?; let perm = from_file.metadata()?.permissions(); let mut to_file = fs::OpenOptions::new() @@ -443,7 +443,7 @@ fn copy_file(from: &Path, to: &Path) -> FsResult<()> { .write(true) .create(true) .truncate(true) - .open(&to)?; + .open(to)?; let writer_metadata = to_file.metadata()?; if writer_metadata.is_file() { // Set the correct file permissions, in case the file already existed. From 40987178c4f9baf54599b502f943be76f42d6f85 Mon Sep 17 00:00:00 2001 From: ud2 Date: Mon, 8 May 2023 06:27:59 +0800 Subject: [PATCH 128/320] fix(core): always report the first error on unhandled rejection (#18992) The root cause of denoland/deno_std#3320, I believe, is that `pending_promise_rejections` is a `HashMap` whose entries are in arbitrary order, and as a result either of the two errors (`AddrInUse` and `TypeError`) may be selected when determining which one to report. I changed the field to a `VecDeque` so that the first error (`AddrInUse` in this case) is always selected. 
--- core/bindings.rs | 4 ++-- core/ops_builtin_v8.rs | 7 ++++--- core/realm.rs | 19 +++---------------- core/runtime.rs | 19 ++++++++++++++++++- 4 files changed, 27 insertions(+), 22 deletions(-) diff --git a/core/bindings.rs b/core/bindings.rs index 1437bc6575..8ad3948a56 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -499,12 +499,12 @@ pub extern "C" fn promise_reject_callback(message: v8::PromiseRejectMessage) { let error_global = v8::Global::new(scope, error); context_state .pending_promise_rejections - .insert(promise_global, error_global); + .push_back((promise_global, error_global)); } PromiseHandlerAddedAfterReject => { context_state .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } PromiseRejectAfterResolved => {} PromiseResolveAfterResolved => { diff --git a/core/ops_builtin_v8.rs b/core/ops_builtin_v8.rs index a77e7a7e6a..8da4842258 100644 --- a/core/ops_builtin_v8.rs +++ b/core/ops_builtin_v8.rs @@ -894,7 +894,7 @@ fn op_store_pending_promise_rejection<'a>( let error_global = v8::Global::new(scope, reason.v8_value); context_state .pending_promise_rejections - .insert(promise_global, error_global); + .push_back((promise_global, error_global)); } #[op(v8)] @@ -909,7 +909,7 @@ fn op_remove_pending_promise_rejection<'a>( let promise_global = v8::Global::new(scope, promise_value); context_state .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } #[op(v8)] @@ -924,7 +924,8 @@ fn op_has_pending_promise_rejection<'a>( let promise_global = v8::Global::new(scope, promise_value); context_state .pending_promise_rejections - .contains_key(&promise_global) + .iter() + .any(|(key, _)| key == &promise_global) } #[op(v8)] diff --git a/core/realm.rs b/core/realm.rs index f907553f08..375f740887 100644 --- a/core/realm.rs +++ b/core/realm.rs @@ -7,8 +7,8 @@ use crate::runtime::exception_to_err_result; use crate::JsRuntime; use anyhow::Error; use 
std::cell::RefCell; -use std::collections::HashMap; use std::collections::HashSet; +use std::collections::VecDeque; use std::hash::BuildHasherDefault; use std::hash::Hasher; use std::option::Option; @@ -43,7 +43,7 @@ pub(crate) struct ContextState { pub(crate) js_format_exception_cb: Option>>, pub(crate) js_wasm_streaming_cb: Option>>, pub(crate) pending_promise_rejections: - HashMap, v8::Global>, + VecDeque<(v8::Global, v8::Global)>, pub(crate) unrefed_ops: HashSet>, // We don't explicitly re-read this prop but need the slice to live alongside // the context @@ -270,22 +270,9 @@ impl<'s> JsRealmLocal<'s> { let context_state_rc = self.state(scope); let mut context_state = context_state_rc.borrow_mut(); - if context_state.pending_promise_rejections.is_empty() { + let Some((_, handle)) = context_state.pending_promise_rejections.pop_front() else { return Ok(()); - } - - let key = { - context_state - .pending_promise_rejections - .keys() - .next() - .unwrap() - .clone() }; - let handle = context_state - .pending_promise_rejections - .remove(&key) - .unwrap(); drop(context_state); let exception = v8::Local::new(scope, handle); diff --git a/core/runtime.rs b/core/runtime.rs index 8c78be55b5..bb77bb25a8 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -1831,7 +1831,7 @@ impl JsRuntime { .state(tc_scope) .borrow_mut() .pending_promise_rejections - .remove(&promise_global); + .retain(|(key, _)| key != &promise_global); } } let promise_global = v8::Global::new(tc_scope, promise); @@ -4138,6 +4138,23 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .contains("JavaScript execution has been terminated")); } + #[tokio::test] + async fn test_unhandled_rejection_order() { + let mut runtime = JsRuntime::new(Default::default()); + runtime + .execute_script_static( + "", + r#" + for (let i = 0; i < 100; i++) { + Promise.reject(i); + } + "#, + ) + .unwrap(); + let err = runtime.run_event_loop(false).await.unwrap_err(); + assert_eq!(err.to_string(), 
"Uncaught (in promise) 0"); + } + #[tokio::test] async fn test_set_promise_reject_callback() { static PROMISE_REJECT: AtomicUsize = AtomicUsize::new(0); From 687a9395889c2653449c0453e35a12b889c56519 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Mon, 8 May 2023 09:52:56 +0200 Subject: [PATCH 129/320] fix(ext/http): Ensure Deno.serve works across --watch restarts (#18998) Fixes #16699 and #18960 by ensuring that we release our HTTP `spawn_local` tasks when the HTTP resource is dropped. Because our cancel handle was being projected from the resource via `RcMap`, the resource was never `Drop`ped. By splitting the handle out into its own `Rc`, we can avoid keeping the resource alive and let it drop to cancel everything. --- cli/tests/integration/watcher_tests.rs | 40 ++++++++++++++ cli/tests/unit/serve_test.ts | 3 +- cli/util/file_watcher.rs | 7 +++ ext/http/http_next.rs | 73 ++++++++++++++------------ 4 files changed, 88 insertions(+), 35 deletions(-) diff --git a/cli/tests/integration/watcher_tests.rs b/cli/tests/integration/watcher_tests.rs index 04320060b2..2d41a74ed5 100644 --- a/cli/tests/integration/watcher_tests.rs +++ b/cli/tests/integration/watcher_tests.rs @@ -1371,6 +1371,46 @@ async fn run_watch_reload_once() { check_alive_then_kill(child); } +/// Regression test for https://github.com/denoland/deno/issues/18960. Ensures that Deno.serve +/// operates properly after a watch restart. 
+#[tokio::test] +async fn test_watch_serve() { + let t = TempDir::new(); + let file_to_watch = t.path().join("file_to_watch.js"); + let file_content = r#" + console.error("serving"); + await Deno.serve({port: 4600, handler: () => new Response("hello")}); + "#; + write(&file_to_watch, file_content).unwrap(); + + let mut child = util::deno_cmd() + .current_dir(util::testdata_path()) + .arg("run") + .arg("--watch") + .arg("--unstable") + .arg("--allow-net") + .arg("-L") + .arg("debug") + .arg(&file_to_watch) + .env("NO_COLOR", "1") + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .spawn() + .unwrap(); + let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child); + + wait_contains("Listening on", &mut stdout_lines).await; + // Note that we start serving very quickly, so we specifically want to wait for this message + wait_contains(r#"Watching paths: [""#, &mut stderr_lines).await; + + write(&file_to_watch, file_content).unwrap(); + + wait_contains("serving", &mut stderr_lines).await; + wait_contains("Listening on", &mut stdout_lines).await; + + check_alive_then_kill(child); +} + #[tokio::test] async fn run_watch_dynamic_imports() { let t = TempDir::new(); diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index 5d5d0428f9..ce7267f580 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -94,8 +94,9 @@ Deno.test(async function httpServerRejectsOnAddrInUse() { onListen: onListen(listeningPromise), onError: createOnErrorCb(ac), }); + await listeningPromise; - assertRejects( + await assertRejects( () => Deno.serve({ handler: (_req) => new Response("ok"), diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index 05415f2a63..1ad5e9ba07 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -304,6 +304,13 @@ where } loop { + // We may need to give the runtime a tick to settle, as cancellations may need to propagate + // to tasks. 
We choose yielding 10 times to the runtime as a decent heuristic. If watch tests + // start to fail, this may need to be increased. + for _ in 0..10 { + tokio::task::yield_now().await; + } + let mut watcher = new_watcher(watcher_sender.clone())?; consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver); diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 593a9c8166..5ed443142e 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -625,83 +625,80 @@ impl> Future for SlabFuture { fn serve_http11_unconditional( io: impl HttpServeStream, svc: impl HttpService + 'static, - cancel: RcRef, ) -> impl Future> + 'static { let conn = http1::Builder::new() .keep_alive(true) .serve_connection(io, svc); - conn - .with_upgrades() - .map_err(AnyError::from) - .try_or_cancel(cancel) + conn.with_upgrades().map_err(AnyError::from) } fn serve_http2_unconditional( io: impl HttpServeStream, svc: impl HttpService + 'static, - cancel: RcRef, ) -> impl Future> + 'static { let conn = http2::Builder::new(LocalExecutor).serve_connection(io, svc); - conn.map_err(AnyError::from).try_or_cancel(cancel) + conn.map_err(AnyError::from) } async fn serve_http2_autodetect( io: impl HttpServeStream, svc: impl HttpService + 'static, - cancel: RcRef, ) -> Result<(), AnyError> { let prefix = NetworkStreamPrefixCheck::new(io, HTTP2_PREFIX); let (matches, io) = prefix.match_prefix().await?; if matches { - serve_http2_unconditional(io, svc, cancel).await + serve_http2_unconditional(io, svc).await } else { - serve_http11_unconditional(io, svc, cancel).await + serve_http11_unconditional(io, svc).await } } fn serve_https( mut io: TlsStream, request_info: HttpConnectionProperties, - cancel: RcRef, + cancel: Rc, tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { new_slab_future(req, request_info.clone(), tx.clone()) }); - spawn_local(async { - 
io.handshake().await?; - // If the client specifically negotiates a protocol, we will use it. If not, we'll auto-detect - // based on the prefix bytes - let handshake = io.get_ref().1.alpn_protocol(); - if handshake == Some(TLS_ALPN_HTTP_2) { - serve_http2_unconditional(io, svc, cancel).await - } else if handshake == Some(TLS_ALPN_HTTP_11) { - serve_http11_unconditional(io, svc, cancel).await - } else { - serve_http2_autodetect(io, svc, cancel).await + spawn_local( + async { + io.handshake().await?; + // If the client specifically negotiates a protocol, we will use it. If not, we'll auto-detect + // based on the prefix bytes + let handshake = io.get_ref().1.alpn_protocol(); + if handshake == Some(TLS_ALPN_HTTP_2) { + serve_http2_unconditional(io, svc).await + } else if handshake == Some(TLS_ALPN_HTTP_11) { + serve_http11_unconditional(io, svc).await + } else { + serve_http2_autodetect(io, svc).await + } } - }) + .try_or_cancel(cancel), + ) } fn serve_http( io: impl HttpServeStream, request_info: HttpConnectionProperties, - cancel: RcRef, + cancel: Rc, tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { new_slab_future(req, request_info.clone(), tx.clone()) }); - spawn_local(serve_http2_autodetect(io, svc, cancel)) + spawn_local(serve_http2_autodetect(io, svc).try_or_cancel(cancel)) } fn serve_http_on( network_stream: NetworkStream, listen_properties: &HttpListenProperties, - cancel: RcRef, + cancel: Rc, tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { // We always want some sort of peer address. If we can't get one, just make up one. 
@@ -733,13 +730,14 @@ fn serve_http_on( struct HttpJoinHandle( AsyncRefCell>>>, - CancelHandle, + // Cancel handle must live in a separate Rc to avoid keeping the outer join handle ref'd + Rc, AsyncRefCell>, ); impl HttpJoinHandle { - fn cancel_handle(self: &Rc) -> RcRef { - RcRef::map(self, |this| &this.1) + fn cancel_handle(self: &Rc) -> Rc { + self.1.clone() } } @@ -753,6 +751,13 @@ impl Resource for HttpJoinHandle { } } +impl Drop for HttpJoinHandle { + fn drop(&mut self) { + // In some cases we may be dropped without closing, so let's cancel everything on the way out + self.1.cancel(); + } +} + #[op(v8)] pub fn op_serve_http( state: Rc>, @@ -773,12 +778,12 @@ pub fn op_serve_http( let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( AsyncRefCell::new(None), - CancelHandle::new(), + CancelHandle::new_rc(), AsyncRefCell::new(rx), )); let cancel_clone = resource.cancel_handle(); - let listen_properties_clone = listen_properties.clone(); + let listen_properties_clone: HttpListenProperties = listen_properties.clone(); let handle = spawn_local(async move { loop { let conn = listener @@ -813,7 +818,7 @@ pub fn op_serve_http_on( state: Rc>, conn: ResourceId, ) -> Result<(ResourceId, &'static str, String), AnyError> { - let network_stream = + let network_stream: NetworkStream = DefaultHttpRequestProperties::get_network_stream_for_rid( &mut state.borrow_mut(), conn, @@ -828,7 +833,7 @@ pub fn op_serve_http_on( let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( AsyncRefCell::new(None), - CancelHandle::new(), + CancelHandle::new_rc(), AsyncRefCell::new(rx), )); @@ -862,7 +867,7 @@ pub async fn op_http_wait( .resource_table .get::(rid)?; - let cancel = join_handle.clone().cancel_handle(); + let cancel = join_handle.cancel_handle(); let next = async { let mut recv = RcRef::map(&join_handle, |this| &this.2).borrow_mut().await; recv.recv().await From 0aa2d7c9c16c514e47bbd07ca90552f9159901ef Mon Sep 17 
00:00:00 2001 From: Aapo Alasuutari Date: Mon, 8 May 2023 10:57:38 +0300 Subject: [PATCH 130/320] perf(ext/ffi): Use `Box<[NativeType]>` in CallbackInfo parameters (#19032) --- ext/ffi/callback.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index d1abd0c704..2d2cf491be 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -96,7 +96,7 @@ struct CallbackInfo { pub async_work_sender: mpsc::UnboundedSender, pub callback: NonNull, pub context: NonNull, - pub parameters: Vec, + pub parameters: Box<[NativeType]>, pub result: NativeType, pub thread_id: u32, pub waker: Option, @@ -578,7 +578,7 @@ where async_work_sender, callback, context, - parameters: args.parameters.clone(), + parameters: args.parameters.clone().into(), result: args.result.clone(), thread_id, waker: None, From df1ca4a158eda08846e11ceb03dd68d6fcffda75 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 8 May 2023 11:02:02 -0400 Subject: [PATCH 131/320] refactor(ext/fs): `deno_fs::FileSystem` - conditional `Send + Sync` (#18993) This allows for having a conditional `Send + Sync` on the file system trait for Deploy. 
--- ext/fs/Cargo.toml | 3 ++ ext/fs/clippy.toml | 3 ++ ext/fs/interface.rs | 8 +++- ext/fs/lib.rs | 11 +++-- ext/fs/ops.rs | 96 ++++++++++++++++++++--------------------- ext/fs/sync.rs | 22 ++++++++++ ext/node/analyze.rs | 17 ++++---- ext/node/clippy.toml | 3 ++ ext/node/lib.rs | 21 +++++---- ext/node/ops/require.rs | 24 +++++------ ext/node/resolution.rs | 16 +++---- runtime/Cargo.toml | 4 +- 12 files changed, 134 insertions(+), 94 deletions(-) create mode 100644 ext/fs/sync.rs diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index f6d563b64d..67c59a4b48 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -13,6 +13,9 @@ description = "Ops for interacting with the file system" [lib] path = "lib.rs" +[features] +sync_fs = [] + [dependencies] async-trait.workspace = true deno_core.workspace = true diff --git a/ext/fs/clippy.toml b/ext/fs/clippy.toml index 53676a90e6..023769214b 100644 --- a/ext/fs/clippy.toml +++ b/ext/fs/clippy.toml @@ -43,3 +43,6 @@ disallowed-methods = [ { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, ] +disallowed-types = [ + { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" }, +] diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 474089153e..2d9b68f55d 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -11,6 +11,9 @@ use deno_io::fs::File; use deno_io::fs::FsResult; use deno_io::fs::FsStat; +use crate::sync::MaybeSend; +use crate::sync::MaybeSync; + #[derive(Deserialize, Default, Debug, Clone, Copy)] #[serde(rename_all = "camelCase")] #[serde(default)] @@ -72,8 +75,11 @@ pub struct FsDirEntry { pub is_symlink: bool, } +#[allow(clippy::disallowed_types)] +pub type FileSystemRc = crate::sync::MaybeArc; + #[async_trait::async_trait(?Send)] -pub trait FileSystem: std::fmt::Debug + Send + Sync { +pub trait FileSystem: 
std::fmt::Debug + MaybeSend + MaybeSync { fn cwd(&self) -> FsResult; fn tmp_dir(&self) -> FsResult; fn chdir(&self, path: &Path) -> FsResult<()>; diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index 4fdf6b3f11..fb0a6ffedb 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -3,14 +3,18 @@ mod interface; mod ops; mod std_fs; +pub mod sync; pub use crate::interface::FileSystem; +pub use crate::interface::FileSystemRc; pub use crate::interface::FsDirEntry; pub use crate::interface::FsFileType; pub use crate::interface::OpenOptions; -use crate::ops::*; - pub use crate::std_fs::RealFs; +pub use crate::sync::MaybeSend; +pub use crate::sync::MaybeSync; + +use crate::ops::*; use deno_core::error::AnyError; use deno_core::OpState; @@ -18,7 +22,6 @@ use std::cell::RefCell; use std::convert::From; use std::path::Path; use std::rc::Rc; -use std::sync::Arc; pub trait FsPermissions { fn check_read(&mut self, p: &Path, api_name: &str) -> Result<(), AnyError>; @@ -153,7 +156,7 @@ deno_core::extension!(deno_fs, esm = [ "30_fs.js" ], options = { unstable: bool, - fs: Arc, + fs: FileSystemRc, }, state = |state, options| { state.put(UnstableChecker { unstable: options.unstable }); diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index c9996d8ce7..b866f86458 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -7,7 +7,6 @@ use std::io::SeekFrom; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use std::sync::Arc; use deno_core::error::custom_error; use deno_core::error::type_error; @@ -28,9 +27,9 @@ use serde::Serialize; use crate::check_unstable; use crate::check_unstable2; +use crate::interface::FileSystemRc; use crate::interface::FsDirEntry; use crate::interface::FsFileType; -use crate::FileSystem; use crate::FsPermissions; use crate::OpenOptions; @@ -39,7 +38,7 @@ pub fn op_cwd

    (state: &mut OpState) -> Result where P: FsPermissions + 'static, { - let fs = state.borrow::>(); + let fs = state.borrow::(); let path = fs.cwd()?; state .borrow_mut::

    () @@ -56,7 +55,7 @@ where let d = PathBuf::from(&directory); state.borrow_mut::

    ().check_read(&d, "Deno.chdir()")?; state - .borrow::>() + .borrow::() .chdir(&d) .context_path("chdir", &d) } @@ -66,10 +65,7 @@ fn op_umask(state: &mut OpState, mask: Option) -> Result where { check_unstable(state, "Deno.umask"); - state - .borrow::>() - .umask(mask) - .context("umask") + state.borrow::().umask(mask).context("umask") } #[op] @@ -87,7 +83,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check(&options, &path, "Deno.openSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let file = fs.open_sync(&path, options).context_path("open", &path)?; let rid = state @@ -112,7 +108,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check(&options, &path, "Deno.open()")?; - state.borrow::>().clone() + state.borrow::().clone() }; let file = fs .open_async(path.clone(), options) @@ -144,7 +140,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.mkdirSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.mkdir_sync(&path, recursive, mode) .context_path("mkdir", &path)?; @@ -168,7 +164,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.mkdir()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.mkdir_async(path.clone(), recursive, mode) @@ -191,7 +187,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.chmodSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.chmod_sync(&path, mode).context_path("chmod", &path)?; Ok(()) } @@ -209,7 +205,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.chmod()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.chmod_async(path.clone(), mode) .await @@ -231,7 +227,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.chownSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.chown_sync(&path, uid, gid) .context_path("chown", &path)?; Ok(()) @@ -251,7 +247,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.chown()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.chown_async(path.clone(), uid, gid) .await @@ -274,7 +270,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.removeSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.remove_sync(&path, recursive) .context_path("remove", &path)?; @@ -297,7 +293,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.remove()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.remove_async(path.clone(), recursive) @@ -323,7 +319,7 @@ where permissions.check_read(&from, "Deno.copyFileSync()")?; permissions.check_write(&to, "Deno.copyFileSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.copy_file_sync(&from, &to) .context_two_path("copy", &from, &to)?; @@ -347,7 +343,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&from, "Deno.copyFile()")?; permissions.check_write(&to, "Deno.copyFile()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.copy_file_async(from.clone(), to.clone()) @@ -370,7 +366,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.statSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let stat = fs.stat_sync(&path).context_path("stat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -390,7 +386,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.stat()")?; - state.borrow::>().clone() + state.borrow::().clone() }; let stat = fs .stat_async(path.clone()) @@ -412,7 +408,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.lstatSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let stat = fs.lstat_sync(&path).context_path("lstat", &path)?; let serializable_stat = SerializableStat::from(stat); serializable_stat.write(stat_out_buf); @@ -432,7 +428,7 @@ where let mut state = state.borrow_mut(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.lstat()")?; - state.borrow::>().clone() + state.borrow::().clone() }; let stat = fs .lstat_async(path.clone()) @@ -451,7 +447,7 @@ where { let path = PathBuf::from(path); - let fs = state.borrow::>().clone(); + let fs = state.borrow::().clone(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.realPathSync()")?; if path.is_relative() { @@ -478,7 +474,7 @@ where let fs; { let mut state = state.borrow_mut(); - fs = state.borrow::>().clone(); + fs = state.borrow::().clone(); let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.realPath()")?; if path.is_relative() { @@ -508,7 +504,7 @@ where .borrow_mut::

    () .check_read(&path, "Deno.readDirSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let entries = fs.read_dir_sync(&path).context_path("readdir", &path)?; Ok(entries) @@ -529,7 +525,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.readDir()")?; - state.borrow::>().clone() + state.borrow::().clone() }; let entries = fs @@ -557,7 +553,7 @@ where permissions.check_write(&oldpath, "Deno.renameSync()")?; permissions.check_write(&newpath, "Deno.renameSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.rename_sync(&oldpath, &newpath) .context_two_path("rename", &oldpath, &newpath)?; @@ -582,7 +578,7 @@ where permissions.check_read(&oldpath, "Deno.rename()")?; permissions.check_write(&oldpath, "Deno.rename()")?; permissions.check_write(&newpath, "Deno.rename()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.rename_async(oldpath.clone(), newpath.clone()) @@ -610,7 +606,7 @@ where permissions.check_read(&newpath, "Deno.linkSync()")?; permissions.check_write(&newpath, "Deno.linkSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.link_sync(&oldpath, &newpath) .context_two_path("link", &oldpath, &newpath)?; @@ -636,7 +632,7 @@ where permissions.check_write(&oldpath, "Deno.link()")?; permissions.check_read(&newpath, "Deno.link()")?; permissions.check_write(&newpath, "Deno.link()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.link_async(oldpath.clone(), newpath.clone()) @@ -663,7 +659,7 @@ where permissions.check_write_all("Deno.symlinkSync()")?; permissions.check_read_all("Deno.symlinkSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.symlink_sync(&oldpath, &newpath, file_type) .context_two_path("symlink", &oldpath, &newpath)?; @@ -688,7 +684,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_write_all("Deno.symlink()")?; permissions.check_read_all("Deno.symlink()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.symlink_async(oldpath.clone(), newpath.clone(), file_type) @@ -712,7 +708,7 @@ where .borrow_mut::

    () .check_read(&path, "Deno.readLink()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let target = fs.read_link_sync(&path).context_path("readlink", &path)?; let target_string = path_into_string(target.into_os_string())?; @@ -734,7 +730,7 @@ where state .borrow_mut::

    () .check_read(&path, "Deno.readLink()")?; - state.borrow::>().clone() + state.borrow::().clone() }; let target = fs @@ -760,7 +756,7 @@ where .borrow_mut::

    () .check_write(&path, "Deno.truncateSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.truncate_sync(&path, len) .context_path("truncate", &path)?; @@ -783,7 +779,7 @@ where state .borrow_mut::

    () .check_write(&path, "Deno.truncate()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.truncate_async(path.clone(), len) @@ -809,7 +805,7 @@ where state.borrow_mut::

    ().check_write(&path, "Deno.utime()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) .context_path("utime", &path)?; @@ -833,7 +829,7 @@ where let fs = { let mut state = state.borrow_mut(); state.borrow_mut::

    ().check_write(&path, "Deno.utime()")?; - state.borrow::>().clone() + state.borrow::().clone() }; fs.utime_async( @@ -997,11 +993,11 @@ where fn make_temp_check_sync

    ( state: &mut OpState, dir: Option, -) -> Result<(PathBuf, Arc), AnyError> +) -> Result<(PathBuf, FileSystemRc), AnyError> where P: FsPermissions + 'static, { - let fs = state.borrow::>().clone(); + let fs = state.borrow::().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1026,12 +1022,12 @@ where fn make_temp_check_async

    ( state: Rc>, dir: Option, -) -> Result<(PathBuf, Arc), AnyError> +) -> Result<(PathBuf, FileSystemRc), AnyError> where P: FsPermissions + 'static, { let mut state = state.borrow_mut(); - let fs = state.borrow::>().clone(); + let fs = state.borrow::().clone(); let dir = match dir { Some(dir) => { let dir = PathBuf::from(dir); @@ -1089,7 +1085,7 @@ where let options = OpenOptions::write(create, append, create_new, mode); permissions.check(&options, &path, "Deno.writeFileSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); fs.write_file_sync(&path, options, &data) .context_path("writefile", &path)?; @@ -1121,7 +1117,7 @@ where permissions.check(&options, &path, "Deno.writeFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::>().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.write_file_async(path.clone(), options, data.to_vec()); @@ -1154,7 +1150,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let buf = fs.read_file_sync(&path).context("readfile")?; Ok(buf.into()) @@ -1177,7 +1173,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::>().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); @@ -1210,7 +1206,7 @@ where let permissions = state.borrow_mut::

    (); permissions.check_read(&path, "Deno.readFileSync()")?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let buf = fs.read_file_sync(&path).context("readfile")?; Ok(string_from_utf8_lossy(buf)) @@ -1233,7 +1229,7 @@ where permissions.check_read(&path, "Deno.readFile()")?; let cancel_handle = cancel_rid .and_then(|rid| state.resource_table.get::(rid).ok()); - (state.borrow::>().clone(), cancel_handle) + (state.borrow::().clone(), cancel_handle) }; let fut = fs.read_file_async(path.clone()); diff --git a/ext/fs/sync.rs b/ext/fs/sync.rs new file mode 100644 index 0000000000..c43850c287 --- /dev/null +++ b/ext/fs/sync.rs @@ -0,0 +1,22 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +pub use inner::*; + +#[cfg(feature = "sync_fs")] +mod inner { + #![allow(clippy::disallowed_types)] + pub use std::sync::Arc as MaybeArc; + + pub use core::marker::Send as MaybeSend; + pub use core::marker::Sync as MaybeSync; +} + +#[cfg(not(feature = "sync_fs"))] +mod inner { + pub use std::rc::Rc as MaybeArc; + + pub trait MaybeSync {} + impl MaybeSync for T where T: ?Sized {} + pub trait MaybeSend {} + impl MaybeSend for T where T: ?Sized {} +} diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index bad0906c5b..6d32c68beb 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -5,7 +5,6 @@ use std::collections::VecDeque; use std::fmt::Write; use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; use deno_core::anyhow::Context; use deno_core::ModuleSpecifier; @@ -13,11 +12,11 @@ use once_cell::sync::Lazy; use deno_core::error::AnyError; +use crate::resolution::NodeResolverRc; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; -use crate::NodeResolver; -use crate::NpmResolver; +use crate::NpmResolverRc; use crate::PackageJson; use crate::PathClean; use crate::NODE_GLOBAL_THIS_NAME; @@ -66,9 +65,9 @@ pub trait CjsEsmCodeAnalyzer { pub struct NodeCodeTranslator { cjs_esm_code_analyzer: 
TCjsEsmCodeAnalyzer, - fs: Arc, - node_resolver: Arc, - npm_resolver: Arc, + fs: deno_fs::FileSystemRc, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, } impl @@ -76,9 +75,9 @@ impl { pub fn new( cjs_esm_code_analyzer: TCjsEsmCodeAnalyzer, - fs: Arc, - node_resolver: Arc, - npm_resolver: Arc, + fs: deno_fs::FileSystemRc, + node_resolver: NodeResolverRc, + npm_resolver: NpmResolverRc, ) -> Self { Self { cjs_esm_code_analyzer, diff --git a/ext/node/clippy.toml b/ext/node/clippy.toml index 31d9d7d472..02fd259d09 100644 --- a/ext/node/clippy.toml +++ b/ext/node/clippy.toml @@ -38,3 +38,6 @@ disallowed-methods = [ { path = "std::fs::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, { path = "std::fs::write", reason = "File system operations should be done using FileSystem trait" }, ] +disallowed-types = [ + { path = "std::sync::Arc", reason = "use deno_fs::sync::MaybeArc instead" }, +] diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 03ec730d84..e01954109a 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -1,11 +1,18 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::collections::HashSet; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + use deno_core::error::AnyError; use deno_core::located_script_name; use deno_core::op; use deno_core::serde_json; use deno_core::JsRuntime; use deno_core::ModuleSpecifier; +use deno_fs::sync::MaybeSend; +use deno_fs::sync::MaybeSync; use deno_npm::resolution::PackageReqNotFoundError; use deno_npm::NpmPackageId; use deno_semver::npm::NpmPackageNv; @@ -13,11 +20,6 @@ use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageReq; use deno_semver::npm::NpmPackageReqReference; use once_cell::sync::Lazy; -use std::collections::HashSet; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; pub mod analyze; pub mod errors; @@ -50,7 +52,10 @@ impl NodePermissions for AllowAllNodePermissions { } } -pub trait NpmResolver: std::fmt::Debug + Send + Sync { +#[allow(clippy::disallowed_types)] +pub type NpmResolverRc = deno_fs::sync::MaybeArc; + +pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { /// Resolves an npm package folder path from an npm package referrer. 
fn resolve_package_folder_from_package( &self, @@ -449,8 +454,8 @@ deno_core::extension!(deno_node, "zlib.ts", ], options = { - maybe_npm_resolver: Option>, - fs: Arc, + maybe_npm_resolver: Option, + fs: deno_fs::FileSystemRc, }, state = |state, options| { let fs = options.fs; diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 972815995a..9e13681aed 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -9,18 +9,18 @@ use deno_core::url::Url; use deno_core::JsRuntimeInspector; use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_fs::FileSystemRc; use std::cell::RefCell; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use std::sync::Arc; use crate::resolution; use crate::NodeModuleKind; use crate::NodePermissions; use crate::NodeResolutionMode; use crate::NodeResolver; -use crate::NpmResolver; +use crate::NpmResolverRc; use crate::PackageJson; fn ensure_read_permission

    ( @@ -30,7 +30,7 @@ fn ensure_read_permission

    ( where P: NodePermissions + 'static, { - let resolver = state.borrow::>(); + let resolver = state.borrow::(); let permissions = state.borrow::

    (); resolver.ensure_read_permission(permissions, file_path) } @@ -93,7 +93,7 @@ pub fn op_require_node_module_paths

    ( where P: NodePermissions + 'static, { - let fs = state.borrow::>(); + let fs = state.borrow::(); // Guarantee that "from" is absolute. let from = deno_core::resolve_path( &from, @@ -189,7 +189,7 @@ fn op_require_resolve_deno_dir( request: String, parent_filename: String, ) -> Option { - let resolver = state.borrow::>(); + let resolver = state.borrow::(); resolver .resolve_package_folder_from_package( &request, @@ -202,7 +202,7 @@ fn op_require_resolve_deno_dir( #[op] fn op_require_is_deno_dir_package(state: &mut OpState, path: String) -> bool { - let resolver = state.borrow::>(); + let resolver = state.borrow::(); resolver.in_npm_package_at_path(&PathBuf::from(path)) } @@ -262,7 +262,7 @@ where { let path = PathBuf::from(path); ensure_read_permission::

    (state, &path)?; - let fs = state.borrow::>(); + let fs = state.borrow::(); if let Ok(metadata) = fs.stat_sync(&path) { if metadata.is_file { return Ok(0); @@ -284,7 +284,7 @@ where { let path = PathBuf::from(request); ensure_read_permission::

    (state, &path)?; - let fs = state.borrow::>(); + let fs = state.borrow::(); let canonicalized_path = deno_core::strip_unc_prefix(fs.realpath_sync(&path)?); Ok(canonicalized_path.to_string_lossy().to_string()) @@ -346,7 +346,7 @@ where if let Some(parent_id) = maybe_parent_id { if parent_id == "" || parent_id == "internal/preload" { - let fs = state.borrow::>(); + let fs = state.borrow::(); if let Ok(cwd) = fs.cwd() { ensure_read_permission::

    (state, &cwd)?; return Ok(Some(cwd.to_string_lossy().to_string())); @@ -429,7 +429,7 @@ where { let file_path = PathBuf::from(file_path); ensure_read_permission::

    (state, &file_path)?; - let fs = state.borrow::>(); + let fs = state.borrow::(); Ok(fs.read_to_string(&file_path)?) } @@ -457,8 +457,8 @@ fn op_require_resolve_exports

    ( where P: NodePermissions + 'static, { - let fs = state.borrow::>(); - let npm_resolver = state.borrow::>(); + let fs = state.borrow::(); + let npm_resolver = state.borrow::(); let node_resolver = state.borrow::>(); let permissions = state.borrow::

    (); diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 71b988c194..16720f22c0 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -2,7 +2,6 @@ use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -12,6 +11,7 @@ use deno_core::serde_json::Map; use deno_core::serde_json::Value; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_fs::FileSystemRc; use deno_media_type::MediaType; use deno_semver::npm::NpmPackageNv; use deno_semver::npm::NpmPackageNvReference; @@ -20,7 +20,7 @@ use deno_semver::npm::NpmPackageReqReference; use crate::errors; use crate::AllowAllNodePermissions; use crate::NodePermissions; -use crate::NpmResolver; +use crate::NpmResolverRc; use crate::PackageJson; use crate::PathClean; @@ -104,17 +104,17 @@ impl NodeResolution { } } +#[allow(clippy::disallowed_types)] +pub type NodeResolverRc = deno_fs::sync::MaybeArc; + #[derive(Debug)] pub struct NodeResolver { - fs: Arc, - npm_resolver: Arc, + fs: FileSystemRc, + npm_resolver: NpmResolverRc, } impl NodeResolver { - pub fn new( - fs: Arc, - npm_resolver: Arc, - ) -> Self { + pub fn new(fs: FileSystemRc, npm_resolver: NpmResolverRc) -> Self { Self { fs, npm_resolver } } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 8618714dca..9f9c65af1d 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -41,7 +41,7 @@ deno_core.workspace = true deno_crypto.workspace = true deno_fetch.workspace = true deno_ffi.workspace = true -deno_fs.workspace = true +deno_fs = { workspace = true, features = ["sync_fs"] } deno_http.workspace = true deno_io.workspace = true deno_net.workspace = true @@ -67,7 +67,7 @@ deno_core.workspace = true deno_crypto.workspace = true deno_fetch.workspace = true deno_ffi.workspace = true -deno_fs.workspace = true +deno_fs = { workspace = true, features = ["sync_fs"] } deno_http.workspace = true deno_io.workspace = true deno_kv.workspace = true From 
7da8b1d9bbc65a1794df4850954d22e654d49aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 8 May 2023 20:42:34 +0200 Subject: [PATCH 132/320] Revert "perf(core): use jemalloc for V8 array buffer allocator (#18875)" (#19046) This reverts commit 022aae9854bed6219d75eeb82fcf46652c21050d. --- Cargo.lock | 1 - core/Cargo.toml | 3 --- core/runtime.rs | 64 ------------------------------------------------- 3 files changed, 68 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c07936bd1a..cee7665d72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -876,7 +876,6 @@ dependencies = [ "serde_v8", "smallvec", "sourcemap", - "tikv-jemalloc-sys", "tokio", "url", "v8", diff --git a/core/Cargo.toml b/core/Cargo.toml index 77a1ca1a21..0bdac5703f 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -39,9 +39,6 @@ sourcemap = "6.1" url.workspace = true v8.workspace = true -[target.'cfg(not(target_env = "msvc"))'.dependencies] -tikv-jemalloc-sys.workspace = true - [[example]] name = "http_bench_json_ops" path = "examples/http_bench_json_ops/main.rs" diff --git a/core/runtime.rs b/core/runtime.rs index bb77bb25a8..fb4716e7ca 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -72,48 +72,6 @@ struct IsolateAllocations { Option<(Box>, v8::NearHeapLimitCallback)>, } -/// A custom allocator for array buffers for V8. It uses `jemalloc` so it's -/// not available on Windows. 
-#[cfg(not(target_env = "msvc"))] -mod custom_allocator { - use std::ffi::c_void; - - pub struct RustAllocator; - - pub unsafe extern "C" fn allocate( - _alloc: &RustAllocator, - n: usize, - ) -> *mut c_void { - tikv_jemalloc_sys::calloc(1, n) - } - - pub unsafe extern "C" fn allocate_uninitialized( - _alloc: &RustAllocator, - n: usize, - ) -> *mut c_void { - tikv_jemalloc_sys::malloc(n) - } - - pub unsafe extern "C" fn free( - _alloc: &RustAllocator, - data: *mut c_void, - _n: usize, - ) { - tikv_jemalloc_sys::free(data) - } - - pub unsafe extern "C" fn reallocate( - _alloc: &RustAllocator, - prev: *mut c_void, - _oldlen: usize, - newlen: usize, - ) -> *mut c_void { - tikv_jemalloc_sys::realloc(prev, newlen) - } - - pub unsafe extern "C" fn drop(_alloc: *const RustAllocator) {} -} - /// A single execution context of JavaScript. Corresponds roughly to the "Web /// Worker" concept in the DOM. A JsRuntime is a Future that can be used with /// an event loop (Tokio, async_std). @@ -435,20 +393,6 @@ impl JsRuntime { } isolate } else { - #[cfg(not(target_env = "msvc"))] - let vtable: &'static v8::RustAllocatorVtable< - custom_allocator::RustAllocator, - > = &v8::RustAllocatorVtable { - allocate: custom_allocator::allocate, - allocate_uninitialized: custom_allocator::allocate_uninitialized, - free: custom_allocator::free, - reallocate: custom_allocator::reallocate, - drop: custom_allocator::drop, - }; - #[cfg(not(target_env = "msvc"))] - let allocator = Arc::new(custom_allocator::RustAllocator); - - #[allow(unused_mut)] let mut params = options .create_params .take() @@ -460,14 +404,6 @@ impl JsRuntime { }) .external_references(&**refs); - #[cfg(not(target_env = "msvc"))] - // SAFETY: We are leaking the created `allocator` variable so we're sure - // it will outlive the created isolate. We also made sure that the vtable - // is correct. 
- let mut params = params.array_buffer_allocator(unsafe { - v8::new_rust_allocator(Arc::into_raw(allocator), vtable) - }); - if let Some(snapshot) = options.startup_snapshot { params = match snapshot { Snapshot::Static(data) => params.snapshot_blob(data), From 71db518b7625acb106d93643619f648aed8222cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 8 May 2023 20:59:38 +0200 Subject: [PATCH 133/320] refactor(core): verify there are no ops with duplicate names (#19047) This commit adds a "debug build" only check that verifies on startup that there are no duplicate ops (ie. the op names are unique). --- core/runtime.rs | 59 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/core/runtime.rs b/core/runtime.rs index fb4716e7ca..bd79ca6cc0 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -769,7 +769,7 @@ impl JsRuntime { let macroware = move |d| middleware.iter().fold(d, |d, m| m(d)); // Flatten ops, apply middlware & override disabled ops - exts + let ops: Vec<_> = exts .iter_mut() .filter_map(|e| e.init_ops()) .flatten() @@ -777,7 +777,37 @@ impl JsRuntime { name: d.name, ..macroware(d) }) - .collect() + .collect(); + + // In debug build verify there are no duplicate ops. 
+ #[cfg(debug_assertions)] + { + let mut count_by_name = HashMap::new(); + + for op in ops.iter() { + count_by_name + .entry(&op.name) + .or_insert(vec![]) + .push(op.name.to_string()); + } + + let mut duplicate_ops = vec![]; + for (op_name, _count) in + count_by_name.iter().filter(|(_k, v)| v.len() > 1) + { + duplicate_ops.push(op_name.to_string()); + } + if !duplicate_ops.is_empty() { + let mut msg = "Found ops with duplicate names:\n".to_string(); + for op_name in duplicate_ops { + msg.push_str(&format!(" - {}\n", op_name)); + } + msg.push_str("Op names need to be unique."); + panic!("{}", msg); + } + } + + ops } /// Initializes ops of provided Extensions @@ -4787,4 +4817,29 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { "Cannot load extension module from external code" ); } + + #[cfg(debug_assertions)] + #[test] + #[should_panic(expected = "Found ops with duplicate names:")] + fn duplicate_op_names() { + mod a { + use super::*; + + #[op] + fn op_test() -> Result { + Ok(String::from("Test")) + } + } + + #[op] + fn op_test() -> Result { + Ok(String::from("Test")) + } + + deno_core::extension!(test_ext, ops = [a::op_test, op_test]); + JsRuntime::new(RuntimeOptions { + extensions: vec![test_ext::init_ops()], + ..Default::default() + }); + } } From e021070a2a564b2e972851360265f2466f7e4b22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 8 May 2023 21:37:29 +0200 Subject: [PATCH 134/320] refactor(core): make sure to always set embedder wrapper offset (#19048) --- core/runtime.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/core/runtime.rs b/core/runtime.rs index bd79ca6cc0..3c2e6f3e11 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -396,12 +396,11 @@ impl JsRuntime { let mut params = options .create_params .take() - .unwrap_or_else(|| { - v8::CreateParams::default().embedder_wrapper_type_info_offsets( - V8_WRAPPER_TYPE_INDEX, - V8_WRAPPER_OBJECT_INDEX, - ) - }) + 
.unwrap_or_default() + .embedder_wrapper_type_info_offsets( + V8_WRAPPER_TYPE_INDEX, + V8_WRAPPER_OBJECT_INDEX, + ) .external_references(&**refs); if let Some(snapshot) = options.startup_snapshot { From 1f9d47b174a148dcfef2c86cfabd51b0b75f0dc7 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Mon, 8 May 2023 23:07:45 +0200 Subject: [PATCH 135/320] refactor: prefix ops w/ crate they are defined in (#19044) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bartek Iwańczuk --- ext/crypto/00_crypto.js | 28 ++++---- ext/crypto/ed25519.rs | 31 ++++++--- ext/crypto/lib.rs | 28 ++++---- ext/crypto/x25519.rs | 20 ++++-- ext/fs/30_fs.js | 146 +++++++++++++++++++++------------------- ext/fs/lib.rs | 122 ++++++++++++++++----------------- ext/fs/ops.rs | 128 ++++++++++++++++++----------------- ext/http/00_serve.js | 94 +++++++++++++------------- ext/http/http_next.rs | 38 ++++++----- ext/http/lib.rs | 40 +++++------ 10 files changed, 358 insertions(+), 317 deletions(-) diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 1008f4cf6d..5253c5784c 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -884,7 +884,7 @@ class SubtleCrypto { // https://briansmith.org/rustdoc/src/ring/ec/curve25519/ed25519/signing.rs.html#260 const SIGNATURE_LEN = 32 * 2; // ELEM_LEN + SCALAR_LEN const signature = new Uint8Array(SIGNATURE_LEN); - if (!ops.op_sign_ed25519(keyData, data, signature)) { + if (!ops.op_crypto_sign_ed25519(keyData, data, signature)) { throw new DOMException( "Failed to sign", "OperationError", @@ -1363,7 +1363,7 @@ class SubtleCrypto { ); } - return ops.op_verify_ed25519(keyData, data, signature); + return ops.op_crypto_verify_ed25519(keyData, data, signature); } } @@ -1997,7 +1997,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) { } const privateKeyData = new Uint8Array(32); const publicKeyData = new Uint8Array(32); - ops.op_generate_x25519_keypair(privateKeyData, 
publicKeyData); + ops.op_crypto_generate_x25519_keypair(privateKeyData, publicKeyData); const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData); @@ -2042,7 +2042,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) { const privateKeyData = new Uint8Array(ED25519_SEED_LEN); const publicKeyData = new Uint8Array(ED25519_PUBLIC_KEY_LEN); if ( - !ops.op_generate_ed25519_keypair(privateKeyData, publicKeyData) + !ops.op_crypto_generate_ed25519_keypair(privateKeyData, publicKeyData) ) { throw new DOMException("Failed to generate key", "OperationError"); } @@ -2179,7 +2179,7 @@ function importKeyEd25519( } const publicKeyData = new Uint8Array(32); - if (!ops.op_import_spki_ed25519(keyData, publicKeyData)) { + if (!ops.op_crypto_import_spki_ed25519(keyData, publicKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2210,7 +2210,7 @@ function importKeyEd25519( } const privateKeyData = new Uint8Array(32); - if (!ops.op_import_pkcs8_ed25519(keyData, privateKeyData)) { + if (!ops.op_crypto_import_pkcs8_ed25519(keyData, privateKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2397,7 +2397,7 @@ function importKeyX25519( } const publicKeyData = new Uint8Array(32); - if (!ops.op_import_spki_x25519(keyData, publicKeyData)) { + if (!ops.op_crypto_import_spki_x25519(keyData, publicKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -2428,7 +2428,7 @@ function importKeyX25519( } const privateKeyData = new Uint8Array(32); - if (!ops.op_import_pkcs8_x25519(keyData, privateKeyData)) { + if (!ops.op_crypto_import_pkcs8_x25519(keyData, privateKeyData)) { throw new DOMException("Invalid key data", "DataError"); } @@ -4055,7 +4055,7 @@ function exportKeyEd25519(format, key, innerKey) { ); } - const spkiDer = ops.op_export_spki_ed25519(innerKey); + const spkiDer = ops.op_crypto_export_spki_ed25519(innerKey); return TypedArrayPrototypeGetBuffer(spkiDer); } case "pkcs8": { @@ -4067,7 
+4067,7 @@ function exportKeyEd25519(format, key, innerKey) { ); } - const pkcs8Der = ops.op_export_pkcs8_ed25519( + const pkcs8Der = ops.op_crypto_export_pkcs8_ed25519( new Uint8Array([0x04, 0x22, ...new SafeArrayIterator(innerKey)]), ); pkcs8Der[15] = 0x20; @@ -4075,7 +4075,7 @@ function exportKeyEd25519(format, key, innerKey) { } case "jwk": { const x = key[_type] === "private" - ? ops.op_jwk_x_ed25519(innerKey) + ? ops.op_crypto_jwk_x_ed25519(innerKey) : ops.op_crypto_base64url_encode(innerKey); const jwk = { kty: "OKP", @@ -4118,7 +4118,7 @@ function exportKeyX25519(format, key, innerKey) { ); } - const spkiDer = ops.op_export_spki_x25519(innerKey); + const spkiDer = ops.op_crypto_export_spki_x25519(innerKey); return TypedArrayPrototypeGetBuffer(spkiDer); } case "pkcs8": { @@ -4130,7 +4130,7 @@ function exportKeyX25519(format, key, innerKey) { ); } - const pkcs8Der = ops.op_export_pkcs8_x25519( + const pkcs8Der = ops.op_crypto_export_pkcs8_x25519( new Uint8Array([0x04, 0x22, ...new SafeArrayIterator(innerKey)]), ); pkcs8Der[15] = 0x20; @@ -4476,7 +4476,7 @@ async function deriveBits(normalizedAlgorithm, baseKey, length) { const u = WeakMapPrototypeGet(KEY_STORE, uHandle); const secret = new Uint8Array(32); - const isIdentity = ops.op_derive_bits_x25519(k, u, secret); + const isIdentity = ops.op_crypto_derive_bits_x25519(k, u, secret); // 6. 
if (isIdentity) { diff --git a/ext/crypto/ed25519.rs b/ext/crypto/ed25519.rs index 898366bbc1..784583c6b8 100644 --- a/ext/crypto/ed25519.rs +++ b/ext/crypto/ed25519.rs @@ -12,7 +12,10 @@ use spki::der::Decode; use spki::der::Encode; #[op(fast)] -pub fn op_generate_ed25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) -> bool { +pub fn op_crypto_generate_ed25519_keypair( + pkey: &mut [u8], + pubkey: &mut [u8], +) -> bool { let mut rng = OsRng; rng.fill_bytes(pkey); @@ -25,7 +28,11 @@ pub fn op_generate_ed25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_sign_ed25519(key: &[u8], data: &[u8], signature: &mut [u8]) -> bool { +pub fn op_crypto_sign_ed25519( + key: &[u8], + data: &[u8], + signature: &mut [u8], +) -> bool { let pair = match Ed25519KeyPair::from_seed_unchecked(key) { Ok(p) => p, Err(_) => return false, @@ -35,7 +42,11 @@ pub fn op_sign_ed25519(key: &[u8], data: &[u8], signature: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_verify_ed25519(pubkey: &[u8], data: &[u8], signature: &[u8]) -> bool { +pub fn op_crypto_verify_ed25519( + pubkey: &[u8], + data: &[u8], + signature: &[u8], +) -> bool { ring::signature::UnparsedPublicKey::new(&ring::signature::ED25519, pubkey) .verify(data, signature) .is_ok() @@ -46,7 +57,7 @@ pub const ED25519_OID: const_oid::ObjectIdentifier = const_oid::ObjectIdentifier::new_unwrap("1.3.101.112"); #[op(fast)] -pub fn op_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. let pk_info = match spki::SubjectPublicKeyInfo::from_der(key_data) { Ok(pk_info) => pk_info, @@ -66,7 +77,7 @@ pub fn op_import_spki_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. 
// This should probably use OneAsymmetricKey instead let pk_info = match PrivateKeyInfo::from_der(key_data) { @@ -92,7 +103,9 @@ pub fn op_import_pkcs8_ed25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op] -pub fn op_export_spki_ed25519(pubkey: &[u8]) -> Result { +pub fn op_crypto_export_spki_ed25519( + pubkey: &[u8], +) -> Result { let key_info = spki::SubjectPublicKeyInfo { algorithm: spki::AlgorithmIdentifier { // id-Ed25519 @@ -105,7 +118,9 @@ pub fn op_export_spki_ed25519(pubkey: &[u8]) -> Result { } #[op] -pub fn op_export_pkcs8_ed25519(pkey: &[u8]) -> Result { +pub fn op_crypto_export_pkcs8_ed25519( + pkey: &[u8], +) -> Result { // This should probably use OneAsymmetricKey instead let pk_info = rsa::pkcs8::PrivateKeyInfo { public_key: None, @@ -123,7 +138,7 @@ pub fn op_export_pkcs8_ed25519(pkey: &[u8]) -> Result { // 'x' from Section 2 of RFC 8037 // https://www.rfc-editor.org/rfc/rfc8037#section-2 #[op] -pub fn op_jwk_x_ed25519(pkey: &[u8]) -> Result { +pub fn op_crypto_jwk_x_ed25519(pkey: &[u8]) -> Result { let pair = Ed25519KeyPair::from_seed_unchecked(pkey)?; Ok(base64::encode_config( pair.public_key().as_ref(), diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index 6056b02a45..695cc3abdf 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -88,20 +88,20 @@ deno_core::extension!(deno_crypto, op_crypto_unwrap_key, op_crypto_base64url_decode, op_crypto_base64url_encode, - x25519::op_generate_x25519_keypair, - x25519::op_derive_bits_x25519, - x25519::op_import_spki_x25519, - x25519::op_import_pkcs8_x25519, - ed25519::op_generate_ed25519_keypair, - ed25519::op_import_spki_ed25519, - ed25519::op_import_pkcs8_ed25519, - ed25519::op_sign_ed25519, - ed25519::op_verify_ed25519, - ed25519::op_export_spki_ed25519, - ed25519::op_export_pkcs8_ed25519, - ed25519::op_jwk_x_ed25519, - x25519::op_export_spki_x25519, - x25519::op_export_pkcs8_x25519, + x25519::op_crypto_generate_x25519_keypair, + x25519::op_crypto_derive_bits_x25519, + 
x25519::op_crypto_import_spki_x25519, + x25519::op_crypto_import_pkcs8_x25519, + ed25519::op_crypto_generate_ed25519_keypair, + ed25519::op_crypto_import_spki_ed25519, + ed25519::op_crypto_import_pkcs8_ed25519, + ed25519::op_crypto_sign_ed25519, + ed25519::op_crypto_verify_ed25519, + ed25519::op_crypto_export_spki_ed25519, + ed25519::op_crypto_export_pkcs8_ed25519, + ed25519::op_crypto_jwk_x_ed25519, + x25519::op_crypto_export_spki_x25519, + x25519::op_crypto_export_pkcs8_x25519, ], esm = [ "00_crypto.js", "01_webidl.js" ], options = { diff --git a/ext/crypto/x25519.rs b/ext/crypto/x25519.rs index 0ecdf4ddc0..99914e14e5 100644 --- a/ext/crypto/x25519.rs +++ b/ext/crypto/x25519.rs @@ -12,7 +12,7 @@ use spki::der::Decode; use spki::der::Encode; #[op(fast)] -pub fn op_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { +pub fn op_crypto_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { // u-coordinate of the base point. const X25519_BASEPOINT_BYTES: [u8; 32] = [ 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -32,7 +32,11 @@ pub fn op_generate_x25519_keypair(pkey: &mut [u8], pubkey: &mut [u8]) { const MONTGOMERY_IDENTITY: MontgomeryPoint = MontgomeryPoint([0; 32]); #[op(fast)] -pub fn op_derive_bits_x25519(k: &[u8], u: &[u8], secret: &mut [u8]) -> bool { +pub fn op_crypto_derive_bits_x25519( + k: &[u8], + u: &[u8], + secret: &mut [u8], +) -> bool { let k: [u8; 32] = k.try_into().expect("Expected byteLength 32"); let u: [u8; 32] = u.try_into().expect("Expected byteLength 32"); let sh_sec = x25519_dalek::x25519(k, u); @@ -49,7 +53,7 @@ pub const X25519_OID: const_oid::ObjectIdentifier = const_oid::ObjectIdentifier::new_unwrap("1.3.101.110"); #[op(fast)] -pub fn op_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. 
let pk_info = match spki::SubjectPublicKeyInfo::from_der(key_data) { Ok(pk_info) => pk_info, @@ -69,7 +73,7 @@ pub fn op_import_spki_x25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op(fast)] -pub fn op_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { +pub fn op_crypto_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { // 2-3. // This should probably use OneAsymmetricKey instead let pk_info = match PrivateKeyInfo::from_der(key_data) { @@ -95,7 +99,9 @@ pub fn op_import_pkcs8_x25519(key_data: &[u8], out: &mut [u8]) -> bool { } #[op] -pub fn op_export_spki_x25519(pubkey: &[u8]) -> Result { +pub fn op_crypto_export_spki_x25519( + pubkey: &[u8], +) -> Result { let key_info = spki::SubjectPublicKeyInfo { algorithm: spki::AlgorithmIdentifier { // id-X25519 @@ -108,7 +114,9 @@ pub fn op_export_spki_x25519(pubkey: &[u8]) -> Result { } #[op] -pub fn op_export_pkcs8_x25519(pkey: &[u8]) -> Result { +pub fn op_crypto_export_pkcs8_x25519( + pkey: &[u8], +) -> Result { // This should probably use OneAsymmetricKey instead let pk_info = rsa::pkcs8::PrivateKeyInfo { public_key: None, diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index 70cfcee6ef..dbe064ab8a 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -5,17 +5,17 @@ const core = globalThis.Deno.core; const ops = core.ops; const { - op_chmod_async, - op_ftruncate_async, - op_truncate_async, - op_link_async, - op_flock_async, + op_fs_chmod_async, + op_fs_ftruncate_async, + op_fs_truncate_async, + op_fs_link_async, + op_fs_flock_async, } = Deno.core.generateAsyncOpHandler( - "op_chmod_async", - "op_ftruncate_async", - "op_truncate_async", - "op_link_async", - "op_flock_async", + "op_fs_chmod_async", + "op_fs_ftruncate_async", + "op_fs_truncate_async", + "op_fs_link_async", + "op_fs_flock_async", ); const primordials = globalThis.__bootstrap.primordials; const { @@ -45,11 +45,11 @@ import { import { pathFromURL } from "ext:deno_web/00_infra.js"; function chmodSync(path, mode) { - 
ops.op_chmod_sync(pathFromURL(path), mode); + ops.op_fs_chmod_sync(pathFromURL(path), mode); } async function chmod(path, mode) { - await op_chmod_async(pathFromURL(path), mode); + await op_fs_chmod_async(pathFromURL(path), mode); } function chownSync( @@ -57,7 +57,7 @@ function chownSync( uid, gid, ) { - ops.op_chown_sync(pathFromURL(path), uid, gid); + ops.op_fs_chown_sync(pathFromURL(path), uid, gid); } async function chown( @@ -66,7 +66,7 @@ async function chown( gid, ) { await core.opAsync( - "op_chown_async", + "op_fs_chown_async", pathFromURL(path), uid, gid, @@ -77,7 +77,7 @@ function copyFileSync( fromPath, toPath, ) { - ops.op_copy_file_sync( + ops.op_fs_copy_file_sync( pathFromURL(fromPath), pathFromURL(toPath), ); @@ -88,27 +88,31 @@ async function copyFile( toPath, ) { await core.opAsync( - "op_copy_file_async", + "op_fs_copy_file_async", pathFromURL(fromPath), pathFromURL(toPath), ); } function cwd() { - return ops.op_cwd(); + return ops.op_fs_cwd(); } function chdir(directory) { - ops.op_chdir(pathFromURL(directory)); + ops.op_fs_chdir(pathFromURL(directory)); } function makeTempDirSync(options = {}) { - return ops.op_make_temp_dir_sync(options.dir, options.prefix, options.suffix); + return ops.op_fs_make_temp_dir_sync( + options.dir, + options.prefix, + options.suffix, + ); } function makeTempDir(options = {}) { return core.opAsync( - "op_make_temp_dir_async", + "op_fs_make_temp_dir_async", options.dir, options.prefix, options.suffix, @@ -116,7 +120,7 @@ function makeTempDir(options = {}) { } function makeTempFileSync(options = {}) { - return ops.op_make_temp_file_sync( + return ops.op_fs_make_temp_file_sync( options.dir, options.prefix, options.suffix, @@ -125,7 +129,7 @@ function makeTempFileSync(options = {}) { function makeTempFile(options = {}) { return core.opAsync( - "op_make_temp_file_async", + "op_fs_make_temp_file_async", options.dir, options.prefix, options.suffix, @@ -133,7 +137,7 @@ function makeTempFile(options = {}) { } function 
mkdirSync(path, options) { - ops.op_mkdir_sync( + ops.op_fs_mkdir_sync( pathFromURL(path), options?.recursive ?? false, options?.mode, @@ -142,7 +146,7 @@ function mkdirSync(path, options) { async function mkdir(path, options) { await core.opAsync( - "op_mkdir_async", + "op_fs_mkdir_async", pathFromURL(path), options?.recursive ?? false, options?.mode, @@ -150,14 +154,14 @@ async function mkdir(path, options) { } function readDirSync(path) { - return ops.op_read_dir_sync(pathFromURL(path))[ + return ops.op_fs_read_dir_sync(pathFromURL(path))[ SymbolIterator ](); } function readDir(path) { const array = core.opAsync( - "op_read_dir_async", + "op_fs_read_dir_async", pathFromURL(path), ); return { @@ -171,26 +175,26 @@ function readDir(path) { } function readLinkSync(path) { - return ops.op_read_link_sync(pathFromURL(path)); + return ops.op_fs_read_link_sync(pathFromURL(path)); } function readLink(path) { - return core.opAsync("op_read_link_async", pathFromURL(path)); + return core.opAsync("op_fs_read_link_async", pathFromURL(path)); } function realPathSync(path) { - return ops.op_realpath_sync(pathFromURL(path)); + return ops.op_fs_realpath_sync(pathFromURL(path)); } function realPath(path) { - return core.opAsync("op_realpath_async", pathFromURL(path)); + return core.opAsync("op_fs_realpath_async", pathFromURL(path)); } function removeSync( path, options = {}, ) { - ops.op_remove_sync( + ops.op_fs_remove_sync( pathFromURL(path), !!options.recursive, ); @@ -201,14 +205,14 @@ async function remove( options = {}, ) { await core.opAsync( - "op_remove_async", + "op_fs_remove_async", pathFromURL(path), !!options.recursive, ); } function renameSync(oldpath, newpath) { - ops.op_rename_sync( + ops.op_fs_rename_sync( pathFromURL(oldpath), pathFromURL(newpath), ); @@ -216,7 +220,7 @@ function renameSync(oldpath, newpath) { async function rename(oldpath, newpath) { await core.opAsync( - "op_rename_async", + "op_fs_rename_async", pathFromURL(oldpath), pathFromURL(newpath), ); @@ 
-322,31 +326,31 @@ function parseFileInfo(response) { } function fstatSync(rid) { - ops.op_fstat_sync(rid, statBuf); + ops.op_fs_fstat_sync(rid, statBuf); return statStruct(statBuf); } async function fstat(rid) { - return parseFileInfo(await core.opAsync("op_fstat_async", rid)); + return parseFileInfo(await core.opAsync("op_fs_fstat_async", rid)); } async function lstat(path) { - const res = await core.opAsync("op_lstat_async", pathFromURL(path)); + const res = await core.opAsync("op_fs_lstat_async", pathFromURL(path)); return parseFileInfo(res); } function lstatSync(path) { - ops.op_lstat_sync(pathFromURL(path), statBuf); + ops.op_fs_lstat_sync(pathFromURL(path), statBuf); return statStruct(statBuf); } async function stat(path) { - const res = await core.opAsync("op_stat_async", pathFromURL(path)); + const res = await core.opAsync("op_fs_stat_async", pathFromURL(path)); return parseFileInfo(res); } function statSync(path) { - ops.op_stat_sync(pathFromURL(path), statBuf); + ops.op_fs_stat_sync(pathFromURL(path), statBuf); return statStruct(statBuf); } @@ -358,31 +362,31 @@ function coerceLen(len) { } function ftruncateSync(rid, len) { - ops.op_ftruncate_sync(rid, coerceLen(len)); + ops.op_fs_ftruncate_sync(rid, coerceLen(len)); } async function ftruncate(rid, len) { - await op_ftruncate_async(rid, coerceLen(len)); + await op_fs_ftruncate_async(rid, coerceLen(len)); } function truncateSync(path, len) { - ops.op_truncate_sync(path, coerceLen(len)); + ops.op_fs_truncate_sync(path, coerceLen(len)); } async function truncate(path, len) { - await op_truncate_async(path, coerceLen(len)); + await op_fs_truncate_async(path, coerceLen(len)); } function umask(mask) { - return ops.op_umask(mask); + return ops.op_fs_umask(mask); } function linkSync(oldpath, newpath) { - ops.op_link_sync(oldpath, newpath); + ops.op_fs_link_sync(oldpath, newpath); } async function link(oldpath, newpath) { - await op_link_async(oldpath, newpath); + await op_fs_link_async(oldpath, newpath); } 
function toUnixTimeFromEpoch(value) { @@ -413,7 +417,7 @@ function futimeSync( ) { const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); - ops.op_futime_sync(rid, atimeSec, atimeNsec, mtimeSec, mtimeNsec); + ops.op_fs_futime_sync(rid, atimeSec, atimeNsec, mtimeSec, mtimeNsec); } async function futime( @@ -424,7 +428,7 @@ async function futime( const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); await core.opAsync( - "op_futime_async", + "op_fs_futime_async", rid, atimeSec, atimeNsec, @@ -440,7 +444,7 @@ function utimeSync( ) { const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); - ops.op_utime_sync( + ops.op_fs_utime_sync( pathFromURL(path), atimeSec, atimeNsec, @@ -457,7 +461,7 @@ async function utime( const { 0: atimeSec, 1: atimeNsec } = toUnixTimeFromEpoch(atime); const { 0: mtimeSec, 1: mtimeNsec } = toUnixTimeFromEpoch(mtime); await core.opAsync( - "op_utime_async", + "op_fs_utime_async", pathFromURL(path), atimeSec, atimeNsec, @@ -471,7 +475,7 @@ function symlinkSync( newpath, options, ) { - ops.op_symlink_sync( + ops.op_fs_symlink_sync( pathFromURL(oldpath), pathFromURL(newpath), options?.type, @@ -484,7 +488,7 @@ async function symlink( options, ) { await core.opAsync( - "op_symlink_async", + "op_fs_symlink_async", pathFromURL(oldpath), pathFromURL(newpath), options?.type, @@ -492,35 +496,35 @@ async function symlink( } function fdatasyncSync(rid) { - ops.op_fdatasync_sync(rid); + ops.op_fs_fdatasync_sync(rid); } async function fdatasync(rid) { - await core.opAsync("op_fdatasync_async", rid); + await core.opAsync("op_fs_fdatasync_async", rid); } function fsyncSync(rid) { - ops.op_fsync_sync(rid); + ops.op_fs_fsync_sync(rid); } async function fsync(rid) { - await core.opAsync("op_fsync_async", rid); + await 
core.opAsync("op_fs_fsync_async", rid); } function flockSync(rid, exclusive) { - ops.op_flock_sync(rid, exclusive === true); + ops.op_fs_flock_sync(rid, exclusive === true); } async function flock(rid, exclusive) { - await op_flock_async(rid, exclusive === true); + await op_fs_flock_async(rid, exclusive === true); } function funlockSync(rid) { - ops.op_funlock_sync(rid); + ops.op_fs_funlock_sync(rid); } async function funlock(rid) { - await core.opAsync("op_funlock_async", rid); + await core.opAsync("op_fs_funlock_async", rid); } function seekSync( @@ -528,7 +532,7 @@ function seekSync( offset, whence, ) { - return ops.op_seek_sync(rid, offset, whence); + return ops.op_fs_seek_sync(rid, offset, whence); } function seek( @@ -536,7 +540,7 @@ function seek( offset, whence, ) { - return core.opAsync("op_seek_async", rid, offset, whence); + return core.opAsync("op_fs_seek_async", rid, offset, whence); } function openSync( @@ -544,7 +548,7 @@ function openSync( options, ) { if (options) checkOpenOptions(options); - const rid = ops.op_open_sync( + const rid = ops.op_fs_open_sync( pathFromURL(path), options, ); @@ -558,7 +562,7 @@ async function open( ) { if (options) checkOpenOptions(options); const rid = await core.opAsync( - "op_open_async", + "op_fs_open_async", pathFromURL(path), options, ); @@ -685,7 +689,7 @@ function checkOpenOptions(options) { const File = FsFile; function readFileSync(path) { - return ops.op_read_file_sync(pathFromURL(path)); + return ops.op_fs_read_file_sync(pathFromURL(path)); } async function readFile(path, options) { @@ -700,7 +704,7 @@ async function readFile(path, options) { try { const read = await core.opAsync( - "op_read_file_async", + "op_fs_read_file_async", pathFromURL(path), cancelRid, ); @@ -716,7 +720,7 @@ async function readFile(path, options) { } function readTextFileSync(path) { - return ops.op_read_file_text_sync(pathFromURL(path)); + return ops.op_fs_read_file_text_sync(pathFromURL(path)); } async function readTextFile(path, 
options) { @@ -731,7 +735,7 @@ async function readTextFile(path, options) { try { const read = await core.opAsync( - "op_read_file_text_async", + "op_fs_read_file_text_async", pathFromURL(path), cancelRid, ); @@ -752,7 +756,7 @@ function writeFileSync( options = {}, ) { options.signal?.throwIfAborted(); - ops.op_write_file_sync( + ops.op_fs_write_file_sync( pathFromURL(path), options.mode, options.append ?? false, @@ -789,7 +793,7 @@ async function writeFile( }); } else { await core.opAsync( - "op_write_file_async", + "op_fs_write_file_async", pathFromURL(path), options.mode, options.append ?? false, diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index fb0a6ffedb..7ba6cd7cac 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -88,69 +88,69 @@ deno_core::extension!(deno_fs, deps = [ deno_web ], parameters = [P: FsPermissions], ops = [ - op_cwd

    , - op_umask, - op_chdir

    , + op_fs_cwd

    , + op_fs_umask, + op_fs_chdir

    , - op_open_sync

    , - op_open_async

    , - op_mkdir_sync

    , - op_mkdir_async

    , - op_chmod_sync

    , - op_chmod_async

    , - op_chown_sync

    , - op_chown_async

    , - op_remove_sync

    , - op_remove_async

    , - op_copy_file_sync

    , - op_copy_file_async

    , - op_stat_sync

    , - op_stat_async

    , - op_lstat_sync

    , - op_lstat_async

    , - op_realpath_sync

    , - op_realpath_async

    , - op_read_dir_sync

    , - op_read_dir_async

    , - op_rename_sync

    , - op_rename_async

    , - op_link_sync

    , - op_link_async

    , - op_symlink_sync

    , - op_symlink_async

    , - op_read_link_sync

    , - op_read_link_async

    , - op_truncate_sync

    , - op_truncate_async

    , - op_utime_sync

    , - op_utime_async

    , - op_make_temp_dir_sync

    , - op_make_temp_dir_async

    , - op_make_temp_file_sync

    , - op_make_temp_file_async

    , - op_write_file_sync

    , - op_write_file_async

    , - op_read_file_sync

    , - op_read_file_async

    , - op_read_file_text_sync

    , - op_read_file_text_async

    , + op_fs_open_sync

    , + op_fs_open_async

    , + op_fs_mkdir_sync

    , + op_fs_mkdir_async

    , + op_fs_chmod_sync

    , + op_fs_chmod_async

    , + op_fs_chown_sync

    , + op_fs_chown_async

    , + op_fs_remove_sync

    , + op_fs_remove_async

    , + op_fs_copy_file_sync

    , + op_fs_copy_file_async

    , + op_fs_stat_sync

    , + op_fs_stat_async

    , + op_fs_lstat_sync

    , + op_fs_lstat_async

    , + op_fs_realpath_sync

    , + op_fs_realpath_async

    , + op_fs_read_dir_sync

    , + op_fs_read_dir_async

    , + op_fs_rename_sync

    , + op_fs_rename_async

    , + op_fs_link_sync

    , + op_fs_link_async

    , + op_fs_symlink_sync

    , + op_fs_symlink_async

    , + op_fs_read_link_sync

    , + op_fs_read_link_async

    , + op_fs_truncate_sync

    , + op_fs_truncate_async

    , + op_fs_utime_sync

    , + op_fs_utime_async

    , + op_fs_make_temp_dir_sync

    , + op_fs_make_temp_dir_async

    , + op_fs_make_temp_file_sync

    , + op_fs_make_temp_file_async

    , + op_fs_write_file_sync

    , + op_fs_write_file_async

    , + op_fs_read_file_sync

    , + op_fs_read_file_async

    , + op_fs_read_file_text_sync

    , + op_fs_read_file_text_async

    , - op_seek_sync, - op_seek_async, - op_fdatasync_sync, - op_fdatasync_async, - op_fsync_sync, - op_fsync_async, - op_fstat_sync, - op_fstat_async, - op_flock_sync, - op_flock_async, - op_funlock_sync, - op_funlock_async, - op_ftruncate_sync, - op_ftruncate_async, - op_futime_sync, - op_futime_async, + op_fs_seek_sync, + op_fs_seek_async, + op_fs_fdatasync_sync, + op_fs_fdatasync_async, + op_fs_fsync_sync, + op_fs_fsync_async, + op_fs_fstat_sync, + op_fs_fstat_async, + op_fs_flock_sync, + op_fs_flock_async, + op_fs_funlock_sync, + op_fs_funlock_async, + op_fs_ftruncate_sync, + op_fs_ftruncate_async, + op_fs_futime_sync, + op_fs_futime_async, ], esm = [ "30_fs.js" ], diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index b866f86458..71526b217a 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -34,7 +34,7 @@ use crate::FsPermissions; use crate::OpenOptions; #[op] -pub fn op_cwd

    (state: &mut OpState) -> Result +pub fn op_fs_cwd

    (state: &mut OpState) -> Result where P: FsPermissions + 'static, { @@ -48,7 +48,7 @@ where } #[op] -fn op_chdir

    (state: &mut OpState, directory: &str) -> Result<(), AnyError> +fn op_fs_chdir

    (state: &mut OpState, directory: &str) -> Result<(), AnyError> where P: FsPermissions + 'static, { @@ -61,7 +61,10 @@ where } #[op] -fn op_umask(state: &mut OpState, mask: Option) -> Result +fn op_fs_umask( + state: &mut OpState, + mask: Option, +) -> Result where { check_unstable(state, "Deno.umask"); @@ -69,7 +72,7 @@ where } #[op] -fn op_open_sync

    ( +fn op_fs_open_sync

    ( state: &mut OpState, path: String, options: Option, @@ -93,7 +96,7 @@ where } #[op] -async fn op_open_async

    ( +async fn op_fs_open_async

    ( state: Rc>, path: String, options: Option, @@ -123,7 +126,7 @@ where } #[op] -fn op_mkdir_sync

    ( +fn op_fs_mkdir_sync

    ( state: &mut OpState, path: String, recursive: bool, @@ -148,7 +151,7 @@ where } #[op] -async fn op_mkdir_async

    ( +async fn op_fs_mkdir_async

    ( state: Rc>, path: String, recursive: bool, @@ -175,7 +178,7 @@ where } #[op] -fn op_chmod_sync

    ( +fn op_fs_chmod_sync

    ( state: &mut OpState, path: String, mode: u32, @@ -193,7 +196,7 @@ where } #[op] -async fn op_chmod_async

    ( +async fn op_fs_chmod_async

    ( state: Rc>, path: String, mode: u32, @@ -214,7 +217,7 @@ where } #[op] -fn op_chown_sync

    ( +fn op_fs_chown_sync

    ( state: &mut OpState, path: String, uid: Option, @@ -234,7 +237,7 @@ where } #[op] -async fn op_chown_async

    ( +async fn op_fs_chown_async

    ( state: Rc>, path: String, uid: Option, @@ -256,7 +259,7 @@ where } #[op] -fn op_remove_sync

    ( +fn op_fs_remove_sync

    ( state: &mut OpState, path: &str, recursive: bool, @@ -278,7 +281,7 @@ where } #[op] -async fn op_remove_async

    ( +async fn op_fs_remove_async

    ( state: Rc>, path: String, recursive: bool, @@ -304,7 +307,7 @@ where } #[op] -fn op_copy_file_sync

    ( +fn op_fs_copy_file_sync

    ( state: &mut OpState, from: &str, to: &str, @@ -327,7 +330,7 @@ where } #[op] -async fn op_copy_file_async

    ( +async fn op_fs_copy_file_async

    ( state: Rc>, from: String, to: String, @@ -354,7 +357,7 @@ where } #[op] -fn op_stat_sync

    ( +fn op_fs_stat_sync

    ( state: &mut OpState, path: String, stat_out_buf: &mut [u32], @@ -374,7 +377,7 @@ where } #[op] -async fn op_stat_async

    ( +async fn op_fs_stat_async

    ( state: Rc>, path: String, ) -> Result @@ -396,7 +399,7 @@ where } #[op] -fn op_lstat_sync

    ( +fn op_fs_lstat_sync

    ( state: &mut OpState, path: String, stat_out_buf: &mut [u32], @@ -416,7 +419,7 @@ where } #[op] -async fn op_lstat_async

    ( +async fn op_fs_lstat_async

    ( state: Rc>, path: String, ) -> Result @@ -438,7 +441,7 @@ where } #[op] -fn op_realpath_sync

    ( +fn op_fs_realpath_sync

    ( state: &mut OpState, path: String, ) -> Result @@ -462,7 +465,7 @@ where } #[op] -async fn op_realpath_async

    ( +async fn op_fs_realpath_async

    ( state: Rc>, path: String, ) -> Result @@ -491,7 +494,7 @@ where } #[op] -fn op_read_dir_sync

    ( +fn op_fs_read_dir_sync

    ( state: &mut OpState, path: String, ) -> Result, AnyError> @@ -511,7 +514,7 @@ where } #[op] -async fn op_read_dir_async

    ( +async fn op_fs_read_dir_async

    ( state: Rc>, path: String, ) -> Result, AnyError> @@ -537,7 +540,7 @@ where } #[op] -fn op_rename_sync

    ( +fn op_fs_rename_sync

    ( state: &mut OpState, oldpath: String, newpath: String, @@ -561,7 +564,7 @@ where } #[op] -async fn op_rename_async

    ( +async fn op_fs_rename_async

    ( state: Rc>, oldpath: String, newpath: String, @@ -589,7 +592,7 @@ where } #[op] -fn op_link_sync

    ( +fn op_fs_link_sync

    ( state: &mut OpState, oldpath: &str, newpath: &str, @@ -614,7 +617,7 @@ where } #[op] -async fn op_link_async

    ( +async fn op_fs_link_async

    ( state: Rc>, oldpath: String, newpath: String, @@ -643,7 +646,7 @@ where } #[op] -fn op_symlink_sync

    ( +fn op_fs_symlink_sync

    ( state: &mut OpState, oldpath: &str, newpath: &str, @@ -667,7 +670,7 @@ where } #[op] -async fn op_symlink_async

    ( +async fn op_fs_symlink_async

    ( state: Rc>, oldpath: String, newpath: String, @@ -695,7 +698,7 @@ where } #[op] -fn op_read_link_sync

    ( +fn op_fs_read_link_sync

    ( state: &mut OpState, path: String, ) -> Result @@ -716,7 +719,7 @@ where } #[op] -async fn op_read_link_async

    ( +async fn op_fs_read_link_async

    ( state: Rc>, path: String, ) -> Result @@ -742,7 +745,7 @@ where } #[op] -fn op_truncate_sync

    ( +fn op_fs_truncate_sync

    ( state: &mut OpState, path: &str, len: u64, @@ -764,7 +767,7 @@ where } #[op] -async fn op_truncate_async

    ( +async fn op_fs_truncate_async

    ( state: Rc>, path: String, len: u64, @@ -790,7 +793,7 @@ where } #[op] -fn op_utime_sync

    ( +fn op_fs_utime_sync

    ( state: &mut OpState, path: &str, atime_secs: i64, @@ -813,7 +816,7 @@ where } #[op] -async fn op_utime_async

    ( +async fn op_fs_utime_async

    ( state: Rc>, path: String, atime_secs: i64, @@ -846,7 +849,7 @@ where } #[op] -fn op_make_temp_dir_sync

    ( +fn op_fs_make_temp_dir_sync

    ( state: &mut OpState, dir: Option, prefix: Option, @@ -879,7 +882,7 @@ where } #[op] -async fn op_make_temp_dir_async

    ( +async fn op_fs_make_temp_dir_async

    ( state: Rc>, dir: Option, prefix: Option, @@ -912,7 +915,7 @@ where } #[op] -fn op_make_temp_file_sync

    ( +fn op_fs_make_temp_file_sync

    ( state: &mut OpState, dir: Option, prefix: Option, @@ -952,7 +955,7 @@ where } #[op] -async fn op_make_temp_file_async

    ( +async fn op_fs_make_temp_file_async

    ( state: Rc>, dir: Option, prefix: Option, @@ -1067,7 +1070,7 @@ fn tmp_name( } #[op] -fn op_write_file_sync

    ( +fn op_fs_write_file_sync

    ( state: &mut OpState, path: String, mode: Option, @@ -1094,7 +1097,7 @@ where } #[op] -async fn op_write_file_async

    ( +async fn op_fs_write_file_async

    ( state: Rc>, path: String, mode: Option, @@ -1138,7 +1141,7 @@ where } #[op] -fn op_read_file_sync

    ( +fn op_fs_read_file_sync

    ( state: &mut OpState, path: String, ) -> Result @@ -1157,7 +1160,7 @@ where } #[op] -async fn op_read_file_async

    ( +async fn op_fs_read_file_async

    ( state: Rc>, path: String, cancel_rid: Option, @@ -1194,7 +1197,7 @@ where } #[op] -fn op_read_file_text_sync

    ( +fn op_fs_read_file_text_sync

    ( state: &mut OpState, path: String, ) -> Result @@ -1213,7 +1216,7 @@ where } #[op] -async fn op_read_file_text_async

    ( +async fn op_fs_read_file_text_async

    ( state: Rc>, path: String, cancel_rid: Option, @@ -1273,7 +1276,7 @@ fn to_seek_from(offset: i64, whence: i32) -> Result { } #[op] -fn op_seek_sync( +fn op_fs_seek_sync( state: &mut OpState, rid: ResourceId, offset: i64, @@ -1286,7 +1289,7 @@ fn op_seek_sync( } #[op] -async fn op_seek_async( +async fn op_fs_seek_async( state: Rc>, rid: ResourceId, offset: i64, @@ -1299,7 +1302,7 @@ async fn op_seek_async( } #[op] -fn op_fdatasync_sync( +fn op_fs_fdatasync_sync( state: &mut OpState, rid: ResourceId, ) -> Result<(), AnyError> { @@ -1309,7 +1312,7 @@ fn op_fdatasync_sync( } #[op] -async fn op_fdatasync_async( +async fn op_fs_fdatasync_async( state: Rc>, rid: ResourceId, ) -> Result<(), AnyError> { @@ -1319,14 +1322,17 @@ async fn op_fdatasync_async( } #[op] -fn op_fsync_sync(state: &mut OpState, rid: ResourceId) -> Result<(), AnyError> { +fn op_fs_fsync_sync( + state: &mut OpState, + rid: ResourceId, +) -> Result<(), AnyError> { let file = FileResource::get_file(state, rid)?; file.sync_sync()?; Ok(()) } #[op] -async fn op_fsync_async( +async fn op_fs_fsync_async( state: Rc>, rid: ResourceId, ) -> Result<(), AnyError> { @@ -1336,7 +1342,7 @@ async fn op_fsync_async( } #[op] -fn op_fstat_sync( +fn op_fs_fstat_sync( state: &mut OpState, rid: ResourceId, stat_out_buf: &mut [u32], @@ -1349,7 +1355,7 @@ fn op_fstat_sync( } #[op] -async fn op_fstat_async( +async fn op_fs_fstat_async( state: Rc>, rid: ResourceId, ) -> Result { @@ -1359,7 +1365,7 @@ async fn op_fstat_async( } #[op] -fn op_flock_sync( +fn op_fs_flock_sync( state: &mut OpState, rid: ResourceId, exclusive: bool, @@ -1371,7 +1377,7 @@ fn op_flock_sync( } #[op] -async fn op_flock_async( +async fn op_fs_flock_async( state: Rc>, rid: ResourceId, exclusive: bool, @@ -1383,7 +1389,7 @@ async fn op_flock_async( } #[op] -fn op_funlock_sync( +fn op_fs_funlock_sync( state: &mut OpState, rid: ResourceId, ) -> Result<(), AnyError> { @@ -1394,7 +1400,7 @@ fn op_funlock_sync( } #[op] -async fn op_funlock_async( +async fn 
op_fs_funlock_async( state: Rc>, rid: ResourceId, ) -> Result<(), AnyError> { @@ -1405,7 +1411,7 @@ async fn op_funlock_async( } #[op] -fn op_ftruncate_sync( +fn op_fs_ftruncate_sync( state: &mut OpState, rid: ResourceId, len: u64, @@ -1416,7 +1422,7 @@ fn op_ftruncate_sync( } #[op] -async fn op_ftruncate_async( +async fn op_fs_ftruncate_async( state: Rc>, rid: ResourceId, len: u64, @@ -1427,7 +1433,7 @@ async fn op_ftruncate_async( } #[op] -fn op_futime_sync( +fn op_fs_futime_sync( state: &mut OpState, rid: ResourceId, atime_secs: i64, @@ -1441,7 +1447,7 @@ fn op_futime_sync( } #[op] -async fn op_futime_async( +async fn op_fs_futime_async( state: Rc>, rid: ResourceId, atime_secs: i64, diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index b18c26e800..1746b1d47c 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -50,35 +50,35 @@ const { const { op_http_wait, - op_upgrade, - op_get_request_headers, - op_get_request_method_and_url, - op_read_request_body, - op_serve_http, - op_set_promise_complete, - op_set_response_body_bytes, - op_set_response_body_resource, - op_set_response_body_stream, - op_set_response_body_text, - op_set_response_header, - op_set_response_headers, - op_upgrade_raw, + op_http_upgrade_next, + op_http_get_request_headers, + op_http_get_request_method_and_url, + op_http_read_request_body, + op_http_serve, + op_http_set_promise_complete, + op_http_set_response_body_bytes, + op_http_set_response_body_resource, + op_http_set_response_body_stream, + op_http_set_response_body_text, + op_http_set_response_header, + op_http_set_response_headers, + op_http_upgrade_raw, op_ws_server_create, } = core.generateAsyncOpHandler( "op_http_wait", - "op_upgrade", - "op_get_request_headers", - "op_get_request_method_and_url", - "op_read_request_body", - "op_serve_http", - "op_set_promise_complete", - "op_set_response_body_bytes", - "op_set_response_body_resource", - "op_set_response_body_stream", - "op_set_response_body_text", - 
"op_set_response_header", - "op_set_response_headers", - "op_upgrade_raw", + "op_http_upgrade_next", + "op_http_get_request_headers", + "op_http_get_request_method_and_url", + "op_http_read_request_body", + "op_http_serve", + "op_http_set_promise_complete", + "op_http_set_response_body_bytes", + "op_http_set_response_body_resource", + "op_http_set_response_body_stream", + "op_http_set_response_body_text", + "op_http_set_response_header", + "op_http_set_response_headers", + "op_http_upgrade_raw", "op_ws_server_create", ); const _upgraded = Symbol("_upgraded"); @@ -178,7 +178,7 @@ class InnerRequest { this.#upgraded = () => {}; - const upgradeRid = op_upgrade_raw(slabId); + const upgradeRid = op_http_upgrade_raw(slabId); const conn = new TcpConn( upgradeRid, @@ -209,7 +209,7 @@ class InnerRequest { (async () => { try { // Returns the connection and extra bytes, which we can pass directly to op_ws_server_create - const upgrade = await op_upgrade( + const upgrade = await op_http_upgrade_next( slabId, response.headerList, ); @@ -248,7 +248,7 @@ class InnerRequest { } // TODO(mmastrac): This is quite slow as we're serializing a large number of values. We may want to consider // splitting this up into multiple ops. 
- this.#methodAndUri = op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); } const path = this.#methodAndUri[2]; @@ -283,7 +283,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - this.#methodAndUri = op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); } return { transport: "tcp", @@ -297,7 +297,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - this.#methodAndUri = op_get_request_method_and_url(this.#slabId); + this.#methodAndUri = op_http_get_request_method_and_url(this.#slabId); } return this.#methodAndUri[0]; } @@ -315,7 +315,7 @@ class InnerRequest { this.#body = null; return null; } - this.#streamRid = op_read_request_body(this.#slabId); + this.#streamRid = op_http_read_request_body(this.#slabId); this.#body = new InnerBody(readableStreamForRid(this.#streamRid, false)); return this.#body; } @@ -324,7 +324,7 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - return op_get_request_headers(this.#slabId); + return op_http_get_request_headers(this.#slabId); } get slabId() { @@ -365,12 +365,12 @@ function fastSyncResponseOrStream(req, respBody) { const body = stream.body; if (ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, body)) { - op_set_response_body_bytes(req, body); + op_http_set_response_body_bytes(req, body); return null; } if (typeof body === "string") { - op_set_response_body_text(req, body); + op_http_set_response_body_text(req, body); return null; } @@ -380,7 +380,7 @@ function fastSyncResponseOrStream(req, respBody) { } const resourceBacking = getReadableStreamResourceBacking(stream); if (resourceBacking) { - op_set_response_body_resource( + op_http_set_response_body_resource( req, resourceBacking.rid, resourceBacking.autoClose, @@ -416,9 +416,9 @@ async function 
asyncResponse(responseBodies, req, status, stream) { // and we race it. let timeoutPromise; timeout = setTimeout(() => { - responseRid = op_set_response_body_stream(req); + responseRid = op_http_set_response_body_stream(req); SetPrototypeAdd(responseBodies, responseRid); - op_set_promise_complete(req, status); + op_http_set_promise_complete(req, status); timeoutPromise = core.writeAll(responseRid, value1); }, 250); const { value: value2, done: done2 } = await reader.read(); @@ -443,13 +443,13 @@ async function asyncResponse(responseBodies, req, status, stream) { // Reader will be closed by finally block // No response stream closed = true; - op_set_response_body_bytes(req, value1); + op_http_set_response_body_bytes(req, value1); return; } - responseRid = op_set_response_body_stream(req); + responseRid = op_http_set_response_body_stream(req); SetPrototypeAdd(responseBodies, responseRid); - op_set_promise_complete(req, status); + op_http_set_promise_complete(req, status); // Write our first packet await core.writeAll(responseRid, value1); } @@ -481,7 +481,7 @@ async function asyncResponse(responseBodies, req, status, stream) { core.tryClose(responseRid); SetPrototypeDelete(responseBodies, responseRid); } else { - op_set_promise_complete(req, status); + op_http_set_promise_complete(req, status); } } } @@ -545,9 +545,9 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { const headers = inner.headerList; if (headers && headers.length > 0) { if (headers.length == 1) { - op_set_response_header(req, headers[0][0], headers[0][1]); + op_http_set_response_header(req, headers[0][0], headers[0][1]); } else { - op_set_response_headers(req, headers); + op_http_set_response_headers(req, headers); } } @@ -557,7 +557,7 @@ function mapToCallback(responseBodies, context, signal, callback, onError) { // Handle the stream asynchronously await asyncResponse(responseBodies, req, status, stream); } else { - op_set_promise_complete(req, status); + 
op_http_set_promise_complete(req, status); } innerRequest?.close(); @@ -625,13 +625,13 @@ async function serve(arg1, arg2) { listenOpts.alpnProtocols = ["h2", "http/1.1"]; const listener = Deno.listenTls(listenOpts); listenOpts.port = listener.addr.port; - context.initialize(op_serve_http( + context.initialize(op_http_serve( listener.rid, )); } else { const listener = Deno.listen(listenOpts); listenOpts.port = listener.addr.port; - context.initialize(op_serve_http( + context.initialize(op_http_serve( listener.rid, )); } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 5ed443142e..f3d37f7516 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -235,7 +235,7 @@ fn slab_insert( } #[op] -pub fn op_upgrade_raw( +pub fn op_http_upgrade_raw( state: &mut OpState, index: u32, ) -> Result { @@ -310,7 +310,7 @@ pub fn op_upgrade_raw( } #[op] -pub async fn op_upgrade( +pub async fn op_http_upgrade_next( state: Rc>, index: u32, headers: Vec<(ByteString, ByteString)>, @@ -353,7 +353,7 @@ pub async fn op_upgrade( } #[op(fast)] -pub fn op_set_promise_complete(index: u32, status: u16) { +pub fn op_http_set_promise_complete(index: u32, status: u16) { with_resp_mut(index, |resp| { // The Javascript code will never provide a status that is invalid here (see 23_response.js) *resp.as_mut().unwrap().status_mut() = @@ -365,7 +365,7 @@ pub fn op_set_promise_complete(index: u32, status: u16) { } #[op] -pub fn op_get_request_method_and_url( +pub fn op_http_get_request_method_and_url( index: u32, ) -> (String, Option, String, String, Option) { // TODO(mmastrac): Passing method can be optimized @@ -393,7 +393,10 @@ pub fn op_get_request_method_and_url( } #[op] -pub fn op_get_request_header(index: u32, name: String) -> Option { +pub fn op_http_get_request_header( + index: u32, + name: String, +) -> Option { with_req(index, |req| { let value = req.headers.get(name); value.map(|value| value.as_bytes().into()) @@ -401,7 +404,9 @@ pub fn op_get_request_header(index: 
u32, name: String) -> Option { } #[op] -pub fn op_get_request_headers(index: u32) -> Vec<(ByteString, ByteString)> { +pub fn op_http_get_request_headers( + index: u32, +) -> Vec<(ByteString, ByteString)> { with_req(index, |req| { let headers = &req.headers; let mut vec = Vec::with_capacity(headers.len()); @@ -436,7 +441,10 @@ pub fn op_get_request_headers(index: u32) -> Vec<(ByteString, ByteString)> { } #[op(fast)] -pub fn op_read_request_body(state: &mut OpState, index: u32) -> ResourceId { +pub fn op_http_read_request_body( + state: &mut OpState, + index: u32, +) -> ResourceId { let incoming = with_req_body_mut(index, |body| body.take().unwrap()); let body_resource = Rc::new(HttpRequestBody::new(incoming)); let res = state.resource_table.add_rc(body_resource.clone()); @@ -447,7 +455,7 @@ pub fn op_read_request_body(state: &mut OpState, index: u32) -> ResourceId { } #[op(fast)] -pub fn op_set_response_header(index: u32, name: &str, value: &str) { +pub fn op_http_set_response_header(index: u32, name: &str, value: &str) { with_resp_mut(index, |resp| { let resp_headers = resp.as_mut().unwrap().headers_mut(); // These are valid latin-1 strings @@ -458,7 +466,7 @@ pub fn op_set_response_header(index: u32, name: &str, value: &str) { } #[op] -pub fn op_set_response_headers( +pub fn op_http_set_response_headers( index: u32, headers: Vec<(ByteString, ByteString)>, ) { @@ -476,7 +484,7 @@ pub fn op_set_response_headers( } #[op(fast)] -pub fn op_set_response_body_resource( +pub fn op_http_set_response_body_resource( state: &mut OpState, index: u32, stream_rid: ResourceId, @@ -502,7 +510,7 @@ pub fn op_set_response_body_resource( } #[op(fast)] -pub fn op_set_response_body_stream( +pub fn op_http_set_response_body_stream( state: &mut OpState, index: u32, ) -> Result { @@ -521,7 +529,7 @@ pub fn op_set_response_body_stream( } #[op(fast)] -pub fn op_set_response_body_text(index: u32, text: String) { +pub fn op_http_set_response_body_text(index: u32, text: String) { if 
!text.is_empty() { with_resp_mut(index, move |response| { response @@ -534,7 +542,7 @@ pub fn op_set_response_body_text(index: u32, text: String) { } #[op(fast)] -pub fn op_set_response_body_bytes(index: u32, buffer: &[u8]) { +pub fn op_http_set_response_body_bytes(index: u32, buffer: &[u8]) { if !buffer.is_empty() { with_resp_mut(index, |response| { response @@ -759,7 +767,7 @@ impl Drop for HttpJoinHandle { } #[op(v8)] -pub fn op_serve_http( +pub fn op_http_serve( state: Rc>, listener_rid: ResourceId, ) -> Result<(ResourceId, &'static str, String), AnyError> { @@ -814,7 +822,7 @@ pub fn op_serve_http( } #[op(v8)] -pub fn op_serve_http_on( +pub fn op_http_serve_on( state: Rc>, conn: ResourceId, ) -> Result<(ResourceId, &'static str, String), AnyError> { diff --git a/ext/http/lib.rs b/ext/http/lib.rs index cde15af88c..6dab375a1a 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -88,30 +88,30 @@ deno_core::extension!( deps = [deno_web, deno_net, deno_fetch, deno_websocket], ops = [ op_http_accept, - op_http_write_headers, op_http_headers, - op_http_write, - op_http_write_resource, op_http_shutdown, - op_http_websocket_accept_header, op_http_upgrade_websocket, - http_next::op_serve_http, - http_next::op_serve_http_on, - http_next::op_http_wait, + op_http_websocket_accept_header, + op_http_write_headers, + op_http_write_resource, + op_http_write, + http_next::op_http_get_request_header, + http_next::op_http_get_request_headers, + http_next::op_http_get_request_method_and_url, + http_next::op_http_read_request_body, + http_next::op_http_serve_on, + http_next::op_http_serve, + http_next::op_http_set_promise_complete, + http_next::op_http_set_response_body_bytes, + http_next::op_http_set_response_body_resource, + http_next::op_http_set_response_body_stream, + http_next::op_http_set_response_body_text, + http_next::op_http_set_response_header, + http_next::op_http_set_response_headers, http_next::op_http_track, - http_next::op_set_response_header, - 
http_next::op_set_response_headers, - http_next::op_set_response_body_text, - http_next::op_set_promise_complete, - http_next::op_set_response_body_bytes, - http_next::op_set_response_body_resource, - http_next::op_set_response_body_stream, - http_next::op_get_request_header, - http_next::op_get_request_headers, - http_next::op_get_request_method_and_url, - http_next::op_read_request_body, - http_next::op_upgrade, - http_next::op_upgrade_raw, + http_next::op_http_upgrade_raw, + http_next::op_http_upgrade_next, + http_next::op_http_wait, ], esm = ["00_serve.js", "01_http.js"], ); From 723d4b038203e35f5be6d11088a7288e6d709869 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 8 May 2023 18:53:58 -0400 Subject: [PATCH 136/320] perf(fmt): faster formatting for minified object literals (#19050) Has fix for https://github.com/dprint/dprint-plugin-typescript/issues/520 --- .dprint.json | 2 +- Cargo.lock | 4 ++-- cli/Cargo.toml | 2 +- ext/node/polyfills/_stream.d.ts | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.dprint.json b/.dprint.json index 07124d6625..e1caea7fe3 100644 --- a/.dprint.json +++ b/.dprint.json @@ -54,7 +54,7 @@ "ext/websocket/autobahn/reports" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.84.2.wasm", + "https://plugins.dprint.dev/typescript-0.84.4.wasm", "https://plugins.dprint.dev/json-0.17.2.wasm", "https://plugins.dprint.dev/markdown-0.15.2.wasm", "https://plugins.dprint.dev/toml-0.5.4.wasm", diff --git a/Cargo.lock b/Cargo.lock index cee7665d72..1d51220e03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1566,9 +1566,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.84.2" +version = "0.84.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8979688409764dd95b356c0d278023cad45fbb24cf788eab8c972ae069a7a3f8" +checksum = "9945b1fae98529bd905d66b3c5efd45408b928cd10b7a3e0764049cf9aaf2167" dependencies = [ "anyhow", "deno_ast", diff --git a/cli/Cargo.toml 
b/cli/Cargo.toml index 8067d147d5..7d0e99d395 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -67,7 +67,7 @@ data-url.workspace = true dissimilar = "=1.0.4" dprint-plugin-json = "=0.17.2" dprint-plugin-markdown = "=0.15.2" -dprint-plugin-typescript = "=0.84.2" +dprint-plugin-typescript = "=0.84.4" encoding_rs.workspace = true env_logger = "=0.9.0" eszip = "=0.41.0" diff --git a/ext/node/polyfills/_stream.d.ts b/ext/node/polyfills/_stream.d.ts index 467ac9f36b..382bb9093c 100644 --- a/ext/node/polyfills/_stream.d.ts +++ b/ext/node/polyfills/_stream.d.ts @@ -1190,7 +1190,7 @@ type PipelineDestinationPromiseFunction = ( source: AsyncIterable, ) => Promise

    ; type PipelineDestination, P> = S extends - PipelineTransformSource ? + PipelineTransformSource ? | WritableStream | PipelineDestinationIterableFunction | PipelineDestinationPromiseFunction From f34fcd16ea4d504c8a87c0873c65598d70bb1d07 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Tue, 9 May 2023 12:37:13 +0200 Subject: [PATCH 137/320] fix(core): let V8 drive extension ESM loads (#18997) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This now allows circular imports across extensions. Instead of load + eval of all ESM files in declaration order, all files are only loaded. Eval is done recursively by V8, only evaluating files that are listed in `Extension::esm_entry_point` fields. --------- Co-authored-by: Bartek Iwańczuk --- cli/build.rs | 4 +- cli/js/99_main.js | 3 + core/modules.rs | 24 +- core/runtime.rs | 108 ++++++--- ext/crypto/00_crypto.js | 502 ++++++++++++++++++++++++++++++++++++++ ext/crypto/01_webidl.js | 516 ---------------------------------------- ext/crypto/lib.rs | 2 +- ext/web/09_file.js | 28 +++ ext/web/11_blob_url.js | 45 ---- ext/web/lib.rs | 1 - runtime/build.rs | 12 +- 11 files changed, 640 insertions(+), 605 deletions(-) create mode 100644 cli/js/99_main.js delete mode 100644 ext/crypto/01_webidl.js delete mode 100644 ext/web/11_blob_url.js diff --git a/cli/build.rs b/cli/build.rs index 8e6b670e29..94b49dfe02 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -309,9 +309,11 @@ mod ts { // deps = [runtime] deno_core::extension!( cli, + esm_entry_point = "ext:cli/99_main.js", esm = [ dir "js", - "40_testing.js" + "40_testing.js", + "99_main.js" ], customizer = |ext: &mut deno_core::ExtensionBuilder| { ext.esm(vec![ExtensionFileSource { diff --git a/cli/js/99_main.js b/cli/js/99_main.js new file mode 100644 index 0000000000..dc9d74fb06 --- /dev/null +++ b/cli/js/99_main.js @@ -0,0 +1,3 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+import "ext:cli/40_testing.js"; +import "ext:cli/runtime/js/99_main.js"; diff --git a/core/modules.rs b/core/modules.rs index 9352301ba8..d1e871ba90 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -1093,6 +1093,28 @@ impl ModuleMap { output } + pub(crate) fn assert_all_modules_evaluated( + &self, + scope: &mut v8::HandleScope, + ) { + let mut not_evaluated = vec![]; + + for (i, handle) in self.handles.iter().enumerate() { + let module = v8::Local::new(scope, handle); + if !matches!(module.get_status(), v8::ModuleStatus::Evaluated) { + not_evaluated.push(self.info[i].name.as_str().to_string()); + } + } + + if !not_evaluated.is_empty() { + let mut msg = "Following modules were not evaluated; make sure they are imported from other code:\n".to_string(); + for m in not_evaluated { + msg.push_str(&format!(" - {}\n", m)); + } + panic!("{}", msg); + } + } + pub fn serialize_for_snapshotting( &self, scope: &mut v8::HandleScope, @@ -1366,7 +1388,7 @@ impl ModuleMap { /// Get module id, following all aliases in case of module specifier /// that had been redirected. 
- fn get_id( + pub(crate) fn get_id( &self, name: impl AsRef, asserted_module_type: AssertedModuleType, diff --git a/core/runtime.rs b/core/runtime.rs index 3c2e6f3e11..b56ef5d659 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -8,6 +8,7 @@ use crate::extensions::OpDecl; use crate::extensions::OpEventLoopFn; use crate::inspector::JsRuntimeInspector; use crate::module_specifier::ModuleSpecifier; +use crate::modules::AssertedModuleType; use crate::modules::ExtModuleLoaderCb; use crate::modules::ModuleCode; use crate::modules::ModuleError; @@ -23,7 +24,6 @@ use crate::snapshot_util; use crate::source_map::SourceMapCache; use crate::source_map::SourceMapGetter; use crate::Extension; -use crate::ExtensionFileSource; use crate::NoopModuleLoader; use crate::OpMiddlewareFn; use crate::OpResult; @@ -687,46 +687,49 @@ impl JsRuntime { module_map } - /// Initializes JS of provided Extensions in the given realm + /// Initializes JS of provided Extensions in the given realm. fn init_extension_js(&mut self, realm: &JsRealm) -> Result<(), Error> { - fn load_and_evaluate_module( - runtime: &mut JsRuntime, - file_source: &ExtensionFileSource, - ) -> Result<(), Error> { - futures::executor::block_on(async { - let id = runtime - .load_side_module( - &ModuleSpecifier::parse(file_source.specifier)?, - None, - ) - .await?; - let receiver = runtime.mod_evaluate(id); - runtime.run_event_loop(false).await?; - receiver.await? - }) - .with_context(|| format!("Couldn't execute '{}'", file_source.specifier)) - } + // Initalization of JS happens in phases: + // 1. Iterate through all extensions: + // a. Execute all extension "script" JS files + // b. Load all extension "module" JS files (but do not execute them yet) + // 2. Iterate through all extensions: + // a. If an extension has a `esm_entry_point`, execute it. 
+ + let mut esm_entrypoints = vec![]; // Take extensions to avoid double-borrow let extensions = std::mem::take(&mut self.extensions); - for ext in extensions.borrow().iter() { - { - if let Some(esm_files) = ext.get_esm_sources() { - if let Some(entry_point) = ext.get_esm_entry_point() { - let file_source = esm_files - .iter() - .find(|file| file.specifier == entry_point) - .unwrap(); - load_and_evaluate_module(self, file_source)?; - } else { - for file_source in esm_files { - load_and_evaluate_module(self, file_source)?; - } + + futures::executor::block_on(async { + let num_of_extensions = extensions.borrow().len(); + for i in 0..num_of_extensions { + let (maybe_esm_files, maybe_esm_entry_point) = { + let exts = extensions.borrow(); + ( + exts[i].get_esm_sources().map(|e| e.to_owned()), + exts[i].get_esm_entry_point(), + ) + }; + + if let Some(esm_files) = maybe_esm_files { + for file_source in esm_files { + self + .load_side_module( + &ModuleSpecifier::parse(file_source.specifier)?, + None, + ) + .await?; } } - } - { + if let Some(entry_point) = maybe_esm_entry_point { + esm_entrypoints.push(entry_point); + } + + let exts = extensions.borrow(); + let ext = &exts[i]; + if let Some(js_files) = ext.get_js_sources() { for file_source in js_files { realm.execute_script( @@ -736,14 +739,41 @@ impl JsRuntime { )?; } } + + if ext.is_core { + self.init_cbs(realm); + } } - // TODO(bartlomieju): this not great that we need to have this conditional - // here, but I haven't found a better way to do it yet. - if ext.is_core { - self.init_cbs(realm); + for specifier in esm_entrypoints { + let mod_id = { + let module_map = self.module_map.as_ref().unwrap(); + + module_map + .borrow() + .get_id(specifier, AssertedModuleType::JavaScriptOrWasm) + .unwrap_or_else(|| { + panic!("{} not present in the module map", specifier) + }) + }; + let receiver = self.mod_evaluate(mod_id); + self.run_event_loop(false).await?; + receiver + .await? 
+ .with_context(|| format!("Couldn't execute '{specifier}'"))?; } - } + + #[cfg(debug_assertions)] + { + let module_map_rc = self.module_map.clone().unwrap(); + let mut scope = realm.handle_scope(self.v8_isolate()); + let module_map = module_map_rc.borrow(); + module_map.assert_all_modules_evaluated(&mut scope); + } + + Ok::<_, anyhow::Error>(()) + })?; + // Restore extensions self.extensions = extensions; diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 5253c5784c..19e669acd0 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -4734,4 +4734,506 @@ webidl.configurePrototype(Crypto); const CryptoPrototype = Crypto.prototype; const crypto = webidl.createBranded(Crypto); + +webidl.converters.AlgorithmIdentifier = (V, prefix, context, opts) => { + // Union for (object or DOMString) + if (webidl.type(V) == "Object") { + return webidl.converters.object(V, prefix, context, opts); + } + return webidl.converters.DOMString(V, prefix, context, opts); +}; + +webidl.converters["BufferSource or JsonWebKey"] = ( + V, + prefix, + context, + opts, +) => { + // Union for (BufferSource or JsonWebKey) + if ( + ArrayBufferIsView(V) || + ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) + ) { + return webidl.converters.BufferSource(V, prefix, context, opts); + } + return webidl.converters.JsonWebKey(V, prefix, context, opts); +}; + +webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ + "public", + "private", + "secret", +]); + +webidl.converters.KeyFormat = webidl.createEnumConverter("KeyFormat", [ + "raw", + "pkcs8", + "spki", + "jwk", +]); + +webidl.converters.KeyUsage = webidl.createEnumConverter("KeyUsage", [ + "encrypt", + "decrypt", + "sign", + "verify", + "deriveKey", + "deriveBits", + "wrapKey", + "unwrapKey", +]); + +webidl.converters["sequence"] = webidl.createSequenceConverter( + webidl.converters.KeyUsage, +); + +webidl.converters.HashAlgorithmIdentifier = + webidl.converters.AlgorithmIdentifier; + +/** @type 
{webidl.Dictionary} */ +const dictAlgorithm = [{ + key: "name", + converter: webidl.converters.DOMString, + required: true, +}]; + +webidl.converters.Algorithm = webidl + .createDictionaryConverter("Algorithm", dictAlgorithm); + +webidl.converters.BigInteger = webidl.converters.Uint8Array; + +/** @type {webidl.Dictionary} */ +const dictRsaKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "modulusLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, + { + key: "publicExponent", + converter: webidl.converters.BigInteger, + required: true, + }, +]; + +webidl.converters.RsaKeyGenParams = webidl + .createDictionaryConverter("RsaKeyGenParams", dictRsaKeyGenParams); + +const dictRsaHashedKeyGenParams = [ + ...new SafeArrayIterator(dictRsaKeyGenParams), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters.RsaHashedKeyGenParams = webidl.createDictionaryConverter( + "RsaHashedKeyGenParams", + dictRsaHashedKeyGenParams, +); + +const dictRsaHashedImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters.RsaHashedImportParams = webidl.createDictionaryConverter( + "RsaHashedImportParams", + dictRsaHashedImportParams, +); + +webidl.converters.NamedCurve = webidl.converters.DOMString; + +const dictEcKeyImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "namedCurve", + converter: webidl.converters.NamedCurve, + required: true, + }, +]; + +webidl.converters.EcKeyImportParams = webidl.createDictionaryConverter( + "EcKeyImportParams", + dictEcKeyImportParams, +); + +const dictEcKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "namedCurve", + converter: webidl.converters.NamedCurve, + required: true, + }, +]; + 
+webidl.converters.EcKeyGenParams = webidl + .createDictionaryConverter("EcKeyGenParams", dictEcKeyGenParams); + +const dictAesKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.AesKeyGenParams = webidl + .createDictionaryConverter("AesKeyGenParams", dictAesKeyGenParams); + +const dictHmacKeyGenParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + }, +]; + +webidl.converters.HmacKeyGenParams = webidl + .createDictionaryConverter("HmacKeyGenParams", dictHmacKeyGenParams); + +const dictRsaPssParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "saltLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.RsaPssParams = webidl + .createDictionaryConverter("RsaPssParams", dictRsaPssParams); + +const dictRsaOaepParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "label", + converter: webidl.converters["BufferSource"], + }, +]; + +webidl.converters.RsaOaepParams = webidl + .createDictionaryConverter("RsaOaepParams", dictRsaOaepParams); + +const dictEcdsaParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, +]; + +webidl.converters["EcdsaParams"] = webidl + .createDictionaryConverter("EcdsaParams", dictEcdsaParams); + +const dictHmacImportParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: 
webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + }, +]; + +webidl.converters.HmacImportParams = webidl + .createDictionaryConverter("HmacImportParams", dictHmacImportParams); + +const dictRsaOtherPrimesInfo = [ + { + key: "r", + converter: webidl.converters["DOMString"], + }, + { + key: "d", + converter: webidl.converters["DOMString"], + }, + { + key: "t", + converter: webidl.converters["DOMString"], + }, +]; + +webidl.converters.RsaOtherPrimesInfo = webidl.createDictionaryConverter( + "RsaOtherPrimesInfo", + dictRsaOtherPrimesInfo, +); +webidl.converters["sequence"] = webidl + .createSequenceConverter( + webidl.converters.RsaOtherPrimesInfo, + ); + +const dictJsonWebKey = [ + // Sections 4.2 and 4.3 of RFC7517. + // https://datatracker.ietf.org/doc/html/rfc7517#section-4 + { + key: "kty", + converter: webidl.converters["DOMString"], + }, + { + key: "use", + converter: webidl.converters["DOMString"], + }, + { + key: "key_ops", + converter: webidl.converters["sequence"], + }, + { + key: "alg", + converter: webidl.converters["DOMString"], + }, + // JSON Web Key Parameters Registration + { + key: "ext", + converter: webidl.converters["boolean"], + }, + // Section 6 of RFC7518 JSON Web Algorithms + // https://datatracker.ietf.org/doc/html/rfc7518#section-6 + { + key: "crv", + converter: webidl.converters["DOMString"], + }, + { + key: "x", + converter: webidl.converters["DOMString"], + }, + { + key: "y", + converter: webidl.converters["DOMString"], + }, + { + key: "d", + converter: webidl.converters["DOMString"], + }, + { + key: "n", + converter: webidl.converters["DOMString"], + }, + { + key: "e", + converter: webidl.converters["DOMString"], + }, + { + key: "p", + converter: webidl.converters["DOMString"], + }, + { + key: "q", + converter: webidl.converters["DOMString"], + }, + { + key: 
"dp", + converter: webidl.converters["DOMString"], + }, + { + key: "dq", + converter: webidl.converters["DOMString"], + }, + { + key: "qi", + converter: webidl.converters["DOMString"], + }, + { + key: "oth", + converter: webidl.converters["sequence"], + }, + { + key: "k", + converter: webidl.converters["DOMString"], + }, +]; + +webidl.converters.JsonWebKey = webidl.createDictionaryConverter( + "JsonWebKey", + dictJsonWebKey, +); + +const dictHkdfParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "salt", + converter: webidl.converters["BufferSource"], + required: true, + }, + { + key: "info", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +webidl.converters.HkdfParams = webidl + .createDictionaryConverter("HkdfParams", dictHkdfParams); + +const dictPbkdf2Params = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "hash", + converter: webidl.converters.HashAlgorithmIdentifier, + required: true, + }, + { + key: "iterations", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, + { + key: "salt", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +webidl.converters.Pbkdf2Params = webidl + .createDictionaryConverter("Pbkdf2Params", dictPbkdf2Params); + +const dictAesDerivedKeyParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +const dictAesCbcParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "iv", + converter: webidl.converters["BufferSource"], + required: true, + }, +]; + +const dictAesGcmParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "iv", + converter: 
webidl.converters["BufferSource"], + required: true, + }, + { + key: "tagLength", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned long"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + }, + { + key: "additionalData", + converter: webidl.converters["BufferSource"], + }, +]; + +const dictAesCtrParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "counter", + converter: webidl.converters["BufferSource"], + required: true, + }, + { + key: "length", + converter: (V, prefix, context, opts) => + webidl.converters["unsigned short"](V, prefix, context, { + ...opts, + enforceRange: true, + }), + required: true, + }, +]; + +webidl.converters.AesDerivedKeyParams = webidl + .createDictionaryConverter("AesDerivedKeyParams", dictAesDerivedKeyParams); + +webidl.converters.AesCbcParams = webidl + .createDictionaryConverter("AesCbcParams", dictAesCbcParams); + +webidl.converters.AesGcmParams = webidl + .createDictionaryConverter("AesGcmParams", dictAesGcmParams); + +webidl.converters.AesCtrParams = webidl + .createDictionaryConverter("AesCtrParams", dictAesCtrParams); + +webidl.converters.CryptoKey = webidl.createInterfaceConverter( + "CryptoKey", + CryptoKey.prototype, +); + +const dictCryptoKeyPair = [ + { + key: "publicKey", + converter: webidl.converters.CryptoKey, + }, + { + key: "privateKey", + converter: webidl.converters.CryptoKey, + }, +]; + +webidl.converters.CryptoKeyPair = webidl + .createDictionaryConverter("CryptoKeyPair", dictCryptoKeyPair); + +const dictEcdhKeyDeriveParams = [ + ...new SafeArrayIterator(dictAlgorithm), + { + key: "public", + converter: webidl.converters.CryptoKey, + required: true, + }, +]; + +webidl.converters.EcdhKeyDeriveParams = webidl + .createDictionaryConverter("EcdhKeyDeriveParams", dictEcdhKeyDeriveParams); + export { Crypto, crypto, CryptoKey, SubtleCrypto }; diff --git a/ext/crypto/01_webidl.js b/ext/crypto/01_webidl.js deleted file mode 100644 index cd0655b3b8..0000000000 --- 
a/ext/crypto/01_webidl.js +++ /dev/null @@ -1,516 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. - -// @ts-check -/// -/// - -const primordials = globalThis.__bootstrap.primordials; -import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { CryptoKey } from "ext:deno_crypto/00_crypto.js"; -const { - ArrayBufferIsView, - ArrayBufferPrototype, - ObjectPrototypeIsPrototypeOf, - SafeArrayIterator, -} = primordials; - -webidl.converters.AlgorithmIdentifier = (V, prefix, context, opts) => { - // Union for (object or DOMString) - if (webidl.type(V) == "Object") { - return webidl.converters.object(V, prefix, context, opts); - } - return webidl.converters.DOMString(V, prefix, context, opts); -}; - -webidl.converters["BufferSource or JsonWebKey"] = ( - V, - prefix, - context, - opts, -) => { - // Union for (BufferSource or JsonWebKey) - if ( - ArrayBufferIsView(V) || - ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) - ) { - return webidl.converters.BufferSource(V, prefix, context, opts); - } - return webidl.converters.JsonWebKey(V, prefix, context, opts); -}; - -webidl.converters.KeyType = webidl.createEnumConverter("KeyType", [ - "public", - "private", - "secret", -]); - -webidl.converters.KeyFormat = webidl.createEnumConverter("KeyFormat", [ - "raw", - "pkcs8", - "spki", - "jwk", -]); - -webidl.converters.KeyUsage = webidl.createEnumConverter("KeyUsage", [ - "encrypt", - "decrypt", - "sign", - "verify", - "deriveKey", - "deriveBits", - "wrapKey", - "unwrapKey", -]); - -webidl.converters["sequence"] = webidl.createSequenceConverter( - webidl.converters.KeyUsage, -); - -webidl.converters.HashAlgorithmIdentifier = - webidl.converters.AlgorithmIdentifier; - -/** @type {webidl.Dictionary} */ -const dictAlgorithm = [{ - key: "name", - converter: webidl.converters.DOMString, - required: true, -}]; - -webidl.converters.Algorithm = webidl - .createDictionaryConverter("Algorithm", dictAlgorithm); - -webidl.converters.BigInteger = 
webidl.converters.Uint8Array; - -/** @type {webidl.Dictionary} */ -const dictRsaKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "modulusLength", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, - { - key: "publicExponent", - converter: webidl.converters.BigInteger, - required: true, - }, -]; - -webidl.converters.RsaKeyGenParams = webidl - .createDictionaryConverter("RsaKeyGenParams", dictRsaKeyGenParams); - -const dictRsaHashedKeyGenParams = [ - ...new SafeArrayIterator(dictRsaKeyGenParams), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters.RsaHashedKeyGenParams = webidl.createDictionaryConverter( - "RsaHashedKeyGenParams", - dictRsaHashedKeyGenParams, -); - -const dictRsaHashedImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters.RsaHashedImportParams = webidl.createDictionaryConverter( - "RsaHashedImportParams", - dictRsaHashedImportParams, -); - -webidl.converters.NamedCurve = webidl.converters.DOMString; - -const dictEcKeyImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "namedCurve", - converter: webidl.converters.NamedCurve, - required: true, - }, -]; - -webidl.converters.EcKeyImportParams = webidl.createDictionaryConverter( - "EcKeyImportParams", - dictEcKeyImportParams, -); - -const dictEcKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "namedCurve", - converter: webidl.converters.NamedCurve, - required: true, - }, -]; - -webidl.converters.EcKeyGenParams = webidl - .createDictionaryConverter("EcKeyGenParams", dictEcKeyGenParams); - -const dictAesKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "length", - converter: (V, prefix, context, opts) => - 
webidl.converters["unsigned short"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, -]; - -webidl.converters.AesKeyGenParams = webidl - .createDictionaryConverter("AesKeyGenParams", dictAesKeyGenParams); - -const dictHmacKeyGenParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "length", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - }, -]; - -webidl.converters.HmacKeyGenParams = webidl - .createDictionaryConverter("HmacKeyGenParams", dictHmacKeyGenParams); - -const dictRsaPssParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "saltLength", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, -]; - -webidl.converters.RsaPssParams = webidl - .createDictionaryConverter("RsaPssParams", dictRsaPssParams); - -const dictRsaOaepParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "label", - converter: webidl.converters["BufferSource"], - }, -]; - -webidl.converters.RsaOaepParams = webidl - .createDictionaryConverter("RsaOaepParams", dictRsaOaepParams); - -const dictEcdsaParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, -]; - -webidl.converters["EcdsaParams"] = webidl - .createDictionaryConverter("EcdsaParams", dictEcdsaParams); - -const dictHmacImportParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "length", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - }, -]; - 
-webidl.converters.HmacImportParams = webidl - .createDictionaryConverter("HmacImportParams", dictHmacImportParams); - -const dictRsaOtherPrimesInfo = [ - { - key: "r", - converter: webidl.converters["DOMString"], - }, - { - key: "d", - converter: webidl.converters["DOMString"], - }, - { - key: "t", - converter: webidl.converters["DOMString"], - }, -]; - -webidl.converters.RsaOtherPrimesInfo = webidl.createDictionaryConverter( - "RsaOtherPrimesInfo", - dictRsaOtherPrimesInfo, -); -webidl.converters["sequence"] = webidl - .createSequenceConverter( - webidl.converters.RsaOtherPrimesInfo, - ); - -const dictJsonWebKey = [ - // Sections 4.2 and 4.3 of RFC7517. - // https://datatracker.ietf.org/doc/html/rfc7517#section-4 - { - key: "kty", - converter: webidl.converters["DOMString"], - }, - { - key: "use", - converter: webidl.converters["DOMString"], - }, - { - key: "key_ops", - converter: webidl.converters["sequence"], - }, - { - key: "alg", - converter: webidl.converters["DOMString"], - }, - // JSON Web Key Parameters Registration - { - key: "ext", - converter: webidl.converters["boolean"], - }, - // Section 6 of RFC7518 JSON Web Algorithms - // https://datatracker.ietf.org/doc/html/rfc7518#section-6 - { - key: "crv", - converter: webidl.converters["DOMString"], - }, - { - key: "x", - converter: webidl.converters["DOMString"], - }, - { - key: "y", - converter: webidl.converters["DOMString"], - }, - { - key: "d", - converter: webidl.converters["DOMString"], - }, - { - key: "n", - converter: webidl.converters["DOMString"], - }, - { - key: "e", - converter: webidl.converters["DOMString"], - }, - { - key: "p", - converter: webidl.converters["DOMString"], - }, - { - key: "q", - converter: webidl.converters["DOMString"], - }, - { - key: "dp", - converter: webidl.converters["DOMString"], - }, - { - key: "dq", - converter: webidl.converters["DOMString"], - }, - { - key: "qi", - converter: webidl.converters["DOMString"], - }, - { - key: "oth", - converter: 
webidl.converters["sequence"], - }, - { - key: "k", - converter: webidl.converters["DOMString"], - }, -]; - -webidl.converters.JsonWebKey = webidl.createDictionaryConverter( - "JsonWebKey", - dictJsonWebKey, -); - -const dictHkdfParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "salt", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "info", - converter: webidl.converters["BufferSource"], - required: true, - }, -]; - -webidl.converters.HkdfParams = webidl - .createDictionaryConverter("HkdfParams", dictHkdfParams); - -const dictPbkdf2Params = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "hash", - converter: webidl.converters.HashAlgorithmIdentifier, - required: true, - }, - { - key: "iterations", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, - { - key: "salt", - converter: webidl.converters["BufferSource"], - required: true, - }, -]; - -webidl.converters.Pbkdf2Params = webidl - .createDictionaryConverter("Pbkdf2Params", dictPbkdf2Params); - -const dictAesDerivedKeyParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "length", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, -]; - -const dictAesCbcParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "iv", - converter: webidl.converters["BufferSource"], - required: true, - }, -]; - -const dictAesGcmParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "iv", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "tagLength", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned long"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - }, - { - 
key: "additionalData", - converter: webidl.converters["BufferSource"], - }, -]; - -const dictAesCtrParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "counter", - converter: webidl.converters["BufferSource"], - required: true, - }, - { - key: "length", - converter: (V, prefix, context, opts) => - webidl.converters["unsigned short"](V, prefix, context, { - ...opts, - enforceRange: true, - }), - required: true, - }, -]; - -webidl.converters.AesDerivedKeyParams = webidl - .createDictionaryConverter("AesDerivedKeyParams", dictAesDerivedKeyParams); - -webidl.converters.AesCbcParams = webidl - .createDictionaryConverter("AesCbcParams", dictAesCbcParams); - -webidl.converters.AesGcmParams = webidl - .createDictionaryConverter("AesGcmParams", dictAesGcmParams); - -webidl.converters.AesCtrParams = webidl - .createDictionaryConverter("AesCtrParams", dictAesCtrParams); - -webidl.converters.CryptoKey = webidl.createInterfaceConverter( - "CryptoKey", - CryptoKey.prototype, -); - -const dictCryptoKeyPair = [ - { - key: "publicKey", - converter: webidl.converters.CryptoKey, - }, - { - key: "privateKey", - converter: webidl.converters.CryptoKey, - }, -]; - -webidl.converters.CryptoKeyPair = webidl - .createDictionaryConverter("CryptoKeyPair", dictCryptoKeyPair); - -const dictEcdhKeyDeriveParams = [ - ...new SafeArrayIterator(dictAlgorithm), - { - key: "public", - converter: webidl.converters.CryptoKey, - required: true, - }, -]; - -webidl.converters.EcdhKeyDeriveParams = webidl - .createDictionaryConverter("EcdhKeyDeriveParams", dictEcdhKeyDeriveParams); diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index 695cc3abdf..f481f97f6b 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -103,7 +103,7 @@ deno_core::extension!(deno_crypto, x25519::op_crypto_export_spki_x25519, x25519::op_crypto_export_pkcs8_x25519, ], - esm = [ "00_crypto.js", "01_webidl.js" ], + esm = [ "00_crypto.js" ], options = { maybe_seed: Option, }, diff --git a/ext/web/09_file.js 
b/ext/web/09_file.js index a81176b385..d65a512f93 100644 --- a/ext/web/09_file.js +++ b/ext/web/09_file.js @@ -14,6 +14,7 @@ const core = globalThis.Deno.core; const ops = core.ops; import * as webidl from "ext:deno_webidl/00_webidl.js"; import { ReadableStream } from "ext:deno_web/06_streams.js"; +import { URL } from "ext:deno_url/00_url.js"; const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, @@ -653,6 +654,33 @@ function blobFromObjectUrl(url) { return blob; } +/** + * @param {Blob} blob + * @returns {string} + */ +function createObjectURL(blob) { + const prefix = "Failed to execute 'createObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, prefix); + blob = webidl.converters["Blob"](blob, prefix, "Argument 1"); + + return ops.op_blob_create_object_url(blob.type, getParts(blob)); +} + +/** + * @param {string} url + * @returns {void} + */ +function revokeObjectURL(url) { + const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; + webidl.requiredArguments(arguments.length, 1, prefix); + url = webidl.converters["DOMString"](url, prefix, "Argument 1"); + + ops.op_blob_revoke_object_url(url); +} + +URL.createObjectURL = createObjectURL; +URL.revokeObjectURL = revokeObjectURL; + export { Blob, blobFromObjectUrl, diff --git a/ext/web/11_blob_url.js b/ext/web/11_blob_url.js deleted file mode 100644 index 3ac240d90a..0000000000 --- a/ext/web/11_blob_url.js +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
- -// @ts-check -/// -/// -/// -/// -/// -/// -/// -/// -/// - -const core = globalThis.Deno.core; -const ops = core.ops; -import * as webidl from "ext:deno_webidl/00_webidl.js"; -import { getParts } from "ext:deno_web/09_file.js"; -import { URL } from "ext:deno_url/00_url.js"; - -/** - * @param {Blob} blob - * @returns {string} - */ -function createObjectURL(blob) { - const prefix = "Failed to execute 'createObjectURL' on 'URL'"; - webidl.requiredArguments(arguments.length, 1, prefix); - blob = webidl.converters["Blob"](blob, prefix, "Argument 1"); - - return ops.op_blob_create_object_url(blob.type, getParts(blob)); -} - -/** - * @param {string} url - * @returns {void} - */ -function revokeObjectURL(url) { - const prefix = "Failed to execute 'revokeObjectURL' on 'URL'"; - webidl.requiredArguments(arguments.length, 1, prefix); - url = webidl.converters["DOMString"](url, prefix, "Argument 1"); - - ops.op_blob_revoke_object_url(url); -} - -URL.createObjectURL = createObjectURL; -URL.revokeObjectURL = revokeObjectURL; diff --git a/ext/web/lib.rs b/ext/web/lib.rs index b0dc0d56d5..3f4468f1f1 100644 --- a/ext/web/lib.rs +++ b/ext/web/lib.rs @@ -103,7 +103,6 @@ deno_core::extension!(deno_web, "08_text_encoding.js", "09_file.js", "10_filereader.js", - "11_blob_url.js", "12_location.js", "13_message_port.js", "14_compression.js", diff --git a/runtime/build.rs b/runtime/build.rs index 4f49ba6816..18aaf7a7f0 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -267,6 +267,17 @@ mod startup_snapshot { include_str!("js/99_main.js"), ), }]); + ext.esm_entry_point("ext:runtime_main/js/99_main.js"); + } + ); + + #[cfg(feature = "snapshot_from_snapshot")] + deno_core::extension!( + runtime_main, + deps = [runtime], + customizer = |ext: &mut deno_core::ExtensionBuilder| { + eprintln!("I am here!!!"); + ext.esm_entry_point("ext:runtime/90_deno_ns.js"); } ); @@ -315,7 +326,6 @@ mod startup_snapshot { // FIXME(bartlomieju): these extensions are specified last, because they // 
depend on `runtime`, even though it should be other way around deno_node::deno_node::init_ops_and_esm::(None, fs), - #[cfg(not(feature = "snapshot_from_snapshot"))] runtime_main::init_ops_and_esm(), ]; From 3e1cc5dbf59618f22218ba7698922adbb26f16ec Mon Sep 17 00:00:00 2001 From: Satya Rohith Date: Tue, 9 May 2023 20:13:29 +0530 Subject: [PATCH 138/320] chore: upgrade tokio to 1.28 (#19053) --- Cargo.lock | 109 ++++++++++++++++++++++++++++++++++++++++++----------- Cargo.toml | 2 +- 2 files changed, 88 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d51220e03..01cadaa171 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5199,14 +5199,13 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.25.0" +version = "1.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" dependencies = [ "autocfg", "bytes", "libc", - "memchr", "mio", "num_cpus", "parking_lot 0.12.1", @@ -5214,18 +5213,18 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.42.0", + "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "1.8.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", - "syn 1.0.109", + "syn 2.0.13", ] [[package]] @@ -5947,13 +5946,13 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", 
- "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -5962,7 +5961,16 @@ version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ - "windows-targets", + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", ] [[package]] @@ -5971,13 +5979,28 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] [[package]] @@ -5986,42 +6009,84 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + [[package]] name = "windows_i686_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + [[package]] name = "windows_i686_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + [[package]] name = "winnow" version = "0.3.6" diff --git a/Cargo.toml b/Cargo.toml index 4ffac7e793..f265d9c674 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -133,7 +133,7 @@ socket2 = "0.4.7" tar = "=0.4.38" tempfile = "3.4.0" thiserror = "=1.0.38" -tokio = { version = "1.25.0", features = ["full"] } +tokio = { version = "1.28.0", features = ["full"] } tikv-jemallocator = "0.5.0" tikv-jemalloc-sys = "0.5.3" tokio-rustls = "0.23.3" From cb63db459cf0099e2975700cd0f66387b4ecb509 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 9 May 2023 22:53:37 +0200 Subject: [PATCH 139/320] bench: fix benchmarks with extensions (#19059) They broke in f34fcd16ea4d504c8a87c0873c65598d70bb1d07 --- ext/url/benches/url_ops.rs | 1 + ext/web/benches/encoding.rs | 1 + ext/web/benches/timers_ops.rs | 1 + ext/webidl/benches/dict.rs | 1 + 4 files changed, 4 insertions(+) diff --git a/ext/url/benches/url_ops.rs b/ext/url/benches/url_ops.rs index 2e56665521..835dfea2ec 100644 --- a/ext/url/benches/url_ops.rs +++ b/ext/url/benches/url_ops.rs @@ -22,6 +22,7 @@ fn setup() -> Vec { "#, ), }]) + .esm_entry_point("ext:bench_setup/setup") .build(), ] } diff --git a/ext/web/benches/encoding.rs b/ext/web/benches/encoding.rs index 
74dd430fc0..5b147f00c8 100644 --- a/ext/web/benches/encoding.rs +++ b/ext/web/benches/encoding.rs @@ -45,6 +45,7 @@ fn setup() -> Vec { .state(|state| { state.put(Permissions {}); }) + .esm_entry_point("ext:bench_setup/setup") .build(), ] } diff --git a/ext/web/benches/timers_ops.rs b/ext/web/benches/timers_ops.rs index 62adaf3e37..084fac98ba 100644 --- a/ext/web/benches/timers_ops.rs +++ b/ext/web/benches/timers_ops.rs @@ -40,6 +40,7 @@ fn setup() -> Vec { .state(|state| { state.put(Permissions{}); }) + .esm_entry_point("ext:bench_setup/setup") .build() ] } diff --git a/ext/webidl/benches/dict.rs b/ext/webidl/benches/dict.rs index d08adbb6a8..b3d95c8a32 100644 --- a/ext/webidl/benches/dict.rs +++ b/ext/webidl/benches/dict.rs @@ -19,6 +19,7 @@ fn setup() -> Vec { "dict.js" )), }]) + .esm_entry_point("ext:deno_webidl_bench/setup.js") .build(), ] } From 50618fc0bc09ba343a9b143e60f4a9bab1ab2522 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Tue, 9 May 2023 17:49:16 -0400 Subject: [PATCH 140/320] fix(vendor): better handling of redirects (#19063) Closes #17582 Closes #19057 --- cli/tools/vendor/build.rs | 48 ++++++++++++++++++++++++++++++++++ cli/tools/vendor/import_map.rs | 19 +++++++------- 2 files changed, 58 insertions(+), 9 deletions(-) diff --git a/cli/tools/vendor/build.rs b/cli/tools/vendor/build.rs index f9df8f0786..11a1fb50e6 100644 --- a/cli/tools/vendor/build.rs +++ b/cli/tools/vendor/build.rs @@ -378,6 +378,54 @@ mod test { ); } + #[tokio::test] + async fn remote_redirect_entrypoint() { + let mut builder = VendorTestBuilder::with_default_setup(); + let output = builder + .with_loader(|loader| { + loader + .add( + "/mod.ts", + concat!( + "import * as test from 'https://x.nest.land/Yenv@1.0.0/mod.ts';\n", + "console.log(test)", + ), + ) + .add_redirect("https://x.nest.land/Yenv@1.0.0/mod.ts", "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts") + .add( + "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", + 
"export * from './src/mod.ts'", + ) + .add( + "https://arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/src/mod.ts", + "export class Test {}", + ); + }) + .build() + .await + .unwrap(); + + assert_eq!( + output.import_map, + Some(json!({ + "imports": { + "https://x.nest.land/Yenv@1.0.0/mod.ts": "./arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", + "https://arweave.net/": "./arweave.net/" + }, + })) + ); + assert_eq!( + output.files, + to_file_vec(&[ + ("/vendor/arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/mod.ts", "export * from './src/mod.ts'"), + ( + "/vendor/arweave.net/VFtWNW3QZ-7__v7c7kck22eFI24OuK1DFzyQHKoZ9AE/src/mod.ts", + "export class Test {}", + ), + ]), + ); + } + #[tokio::test] async fn same_target_filename_specifiers() { let mut builder = VendorTestBuilder::with_default_setup(); diff --git a/cli/tools/vendor/import_map.rs b/cli/tools/vendor/import_map.rs index 916eb55c58..562ae0216d 100644 --- a/cli/tools/vendor/import_map.rs +++ b/cli/tools/vendor/import_map.rs @@ -326,15 +326,16 @@ fn handle_remote_dep_specifier( ) { if is_remote_specifier_text(text) { let base_specifier = mappings.base_specifier(specifier); - if !text.starts_with(base_specifier.as_str()) { - panic!("Expected {text} to start with {base_specifier}"); - } - - let sub_path = &text[base_specifier.as_str().len()..]; - let relative_text = - mappings.relative_specifier_text(base_specifier, specifier); - let expected_sub_path = relative_text.trim_start_matches("./"); - if expected_sub_path != sub_path { + if text.starts_with(base_specifier.as_str()) { + let sub_path = &text[base_specifier.as_str().len()..]; + let relative_text = + mappings.relative_specifier_text(base_specifier, specifier); + let expected_sub_path = relative_text.trim_start_matches("./"); + if expected_sub_path != sub_path { + import_map.imports.add(text.to_string(), specifier); + } + } else { + // it's probably a redirect. 
Add it explicitly to the import map import_map.imports.add(text.to_string(), specifier); } } else { From 3dc745c881c43b9df4aa895291b9e13186be3f17 Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Wed, 10 May 2023 12:30:02 +0200 Subject: [PATCH 141/320] chore(node/stream): unbundle/unminify readable-streams (#19045) --- ext/node/polyfills/_stream.mjs | 5736 +++++++++++++++++++++++- ext/node/polyfills/internal/errors.ts | 4 +- ext/node/polyfills/stream/promises.mjs | 4 +- 3 files changed, 5723 insertions(+), 21 deletions(-) diff --git a/ext/node/polyfills/_stream.mjs b/ext/node/polyfills/_stream.mjs index 3fec7f7767..2e2fcce8c3 100644 --- a/ext/node/polyfills/_stream.mjs +++ b/ext/node/polyfills/_stream.mjs @@ -5,10 +5,5707 @@ import { nextTick } from "ext:deno_node/_next_tick.ts"; import { AbortController } from "ext:deno_web/03_abort_signal.js"; import { Blob } from "ext:deno_web/09_file.js"; +import { StringDecoder } from "ext:deno_node/string_decoder.ts"; +import { + createDeferredPromise, + kEmptyObject, + normalizeEncoding, + once, + promisify, +} from "ext:deno_node/internal/util.mjs"; +import { + isArrayBufferView, + isAsyncFunction, +} from "ext:deno_node/internal/util/types.ts"; +import { debuglog } from "ext:deno_node/internal/util/debuglog.ts"; +import { inspect } from "ext:deno_node/internal/util/inspect.mjs"; + +import { + AbortError, + aggregateTwoErrors, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_RETURN_VALUE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_MISSING_ARGS, + ERR_MULTIPLE_CALLBACK, + ERR_OUT_OF_RANGE, + ERR_SOCKET_BAD_PORT, + ERR_STREAM_ALREADY_FINISHED, + ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES, + ERR_STREAM_PREMATURE_CLOSE, + ERR_STREAM_PUSH_AFTER_EOF, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT, + ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING, + ERR_UNKNOWN_SIGNAL, + hideStackFrames, +} from "ext:deno_node/internal/errors.ts"; /* esm.sh - esbuild bundle(readable-stream@4.2.0) es2022 production 
*/ -const __process$ = { nextTick };import __buffer$ from "ext:deno_node/buffer.ts";import __string_decoder$ from "ext:deno_node/string_decoder.ts";import __events$ from "ext:deno_node/events.ts";var pi=Object.create;var Bt=Object.defineProperty;var wi=Object.getOwnPropertyDescriptor;var yi=Object.getOwnPropertyNames;var gi=Object.getPrototypeOf,Si=Object.prototype.hasOwnProperty;var E=(e=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(e,{get:(t,n)=>(typeof require<"u"?require:t)[n]}):e)(function(e){if(typeof require<"u")return require.apply(this,arguments);throw new Error('Dynamic require of "'+e+'" is not supported')});var g=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var Ei=(e,t,n,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let i of yi(t))!Si.call(e,i)&&i!==n&&Bt(e,i,{get:()=>t[i],enumerable:!(r=wi(t,i))||r.enumerable});return e};var Ri=(e,t,n)=>(n=e!=null?pi(gi(e)):{},Ei(t||!e||!e.__esModule?Bt(n,"default",{value:e,enumerable:!0}):n,e));var m=g((Yf,Gt)=>{"use strict";Gt.exports={ArrayIsArray(e){return Array.isArray(e)},ArrayPrototypeIncludes(e,t){return e.includes(t)},ArrayPrototypeIndexOf(e,t){return e.indexOf(t)},ArrayPrototypeJoin(e,t){return e.join(t)},ArrayPrototypeMap(e,t){return e.map(t)},ArrayPrototypePop(e,t){return e.pop(t)},ArrayPrototypePush(e,t){return e.push(t)},ArrayPrototypeSlice(e,t,n){return e.slice(t,n)},Error,FunctionPrototypeCall(e,t,...n){return e.call(t,...n)},FunctionPrototypeSymbolHasInstance(e,t){return Function.prototype[Symbol.hasInstance].call(e,t)},MathFloor:Math.floor,Number,NumberIsInteger:Number.isInteger,NumberIsNaN:Number.isNaN,NumberMAX_SAFE_INTEGER:Number.MAX_SAFE_INTEGER,NumberMIN_SAFE_INTEGER:Number.MIN_SAFE_INTEGER,NumberParseInt:Number.parseInt,ObjectDefineProperties(e,t){return Object.defineProperties(e,t)},ObjectDefineProperty(e,t,n){return Object.defineProperty(e,t,n)},ObjectGetOwnPropertyDescriptor(e,t){return Object.getOwnPropertyDescriptor(e,t)},ObjectKeys(e){return 
Object.keys(e)},ObjectSetPrototypeOf(e,t){return Object.setPrototypeOf(e,t)},Promise,PromisePrototypeCatch(e,t){return e.catch(t)},PromisePrototypeThen(e,t,n){return e.then(t,n)},PromiseReject(e){return Promise.reject(e)},ReflectApply:Reflect.apply,RegExpPrototypeTest(e,t){return e.test(t)},SafeSet:Set,String,StringPrototypeSlice(e,t,n){return e.slice(t,n)},StringPrototypeToLowerCase(e){return e.toLowerCase()},StringPrototypeToUpperCase(e){return e.toUpperCase()},StringPrototypeTrim(e){return e.trim()},Symbol,SymbolAsyncIterator:Symbol.asyncIterator,SymbolHasInstance:Symbol.hasInstance,SymbolIterator:Symbol.iterator,TypedArrayPrototypeSet(e,t,n){return e.set(t,n)},Uint8Array}});var j=g((Kf,Je)=>{"use strict";var Ai=__buffer$,mi=Object.getPrototypeOf(async function(){}).constructor,Ht=Blob||Ai.Blob,Ti=typeof Ht<"u"?function(t){return t instanceof Ht}:function(t){return!1},Xe=class extends Error{constructor(t){if(!Array.isArray(t))throw new TypeError(`Expected input to be an Array, got ${typeof t}`);let n="";for(let r=0;r{e=r,t=i}),resolve:e,reject:t}},promisify(e){return new Promise((t,n)=>{e((r,...i)=>r?n(r):t(...i))})},debuglog(){return function(){}},format(e,...t){return e.replace(/%([sdifj])/g,function(...[n,r]){let i=t.shift();return r==="f"?i.toFixed(6):r==="j"?JSON.stringify(i):r==="s"&&typeof i=="object"?`${i.constructor!==Object?i.constructor.name:""} {}`.trim():i.toString()})},inspect(e){switch(typeof e){case"string":if(e.includes("'"))if(e.includes('"')){if(!e.includes("`")&&!e.includes("${"))return`\`${e}\``}else return`"${e}"`;return`'${e}'`;case"number":return isNaN(e)?"NaN":Object.is(e,-0)?String(e):e;case"bigint":return`${String(e)}n`;case"boolean":case"undefined":return String(e);case"object":return"{}"}},types:{isAsyncFunction(e){return e instanceof mi},isArrayBufferView(e){return ArrayBuffer.isView(e)}},isBlob:Ti};Je.exports.promisify.custom=Symbol.for("nodejs.util.promisify.custom")});var O=g((zf,Kt)=>{"use 
strict";var{format:Ii,inspect:Re,AggregateError:Mi}=j(),Ni=globalThis.AggregateError||Mi,Di=Symbol("kIsNodeError"),Oi=["string","function","number","object","Function","Object","boolean","bigint","symbol"],qi=/^([A-Z][a-z0-9]*)+$/,xi="__node_internal_",Ae={};function X(e,t){if(!e)throw new Ae.ERR_INTERNAL_ASSERTION(t)}function Vt(e){let t="",n=e.length,r=e[0]==="-"?1:0;for(;n>=r+4;n-=3)t=`_${e.slice(n-3,n)}${t}`;return`${e.slice(0,n)}${t}`}function Li(e,t,n){if(typeof t=="function")return X(t.length<=n.length,`Code: ${e}; The provided arguments length (${n.length}) does not match the required ones (${t.length}).`),t(...n);let r=(t.match(/%[dfijoOs]/g)||[]).length;return X(r===n.length,`Code: ${e}; The provided arguments length (${n.length}) does not match the required ones (${r}).`),n.length===0?t:Ii(t,...n)}function N(e,t,n){n||(n=Error);class r extends n{constructor(...o){super(Li(e,t,o))}toString(){return`${this.name} [${e}]: ${this.message}`}}Object.defineProperties(r.prototype,{name:{value:n.name,writable:!0,enumerable:!1,configurable:!0},toString:{value(){return`${this.name} [${e}]: ${this.message}`},writable:!0,enumerable:!1,configurable:!0}}),r.prototype.code=e,r.prototype[Di]=!0,Ae[e]=r}function Yt(e){let t=xi+e.name;return Object.defineProperty(e,"name",{value:t}),e}function Pi(e,t){if(e&&t&&e!==t){if(Array.isArray(t.errors))return t.errors.push(e),t;let n=new Ni([t,e],t.message);return n.code=t.code,n}return e||t}var Qe=class extends Error{constructor(t="The operation was aborted",n=void 0){if(n!==void 0&&typeof n!="object")throw new Ae.ERR_INVALID_ARG_TYPE("options","Object",n);super(t,n),this.code="ABORT_ERR",this.name="AbortError"}};N("ERR_ASSERTION","%s",Error);N("ERR_INVALID_ARG_TYPE",(e,t,n)=>{X(typeof e=="string","'name' must be a string"),Array.isArray(t)||(t=[t]);let r="The ";e.endsWith(" argument")?r+=`${e} `:r+=`"${e}" ${e.includes(".")?"property":"argument"} `,r+="must be ";let i=[],o=[],l=[];for(let f of t)X(typeof f=="string","All expected 
entries have to be of type string"),Oi.includes(f)?i.push(f.toLowerCase()):qi.test(f)?o.push(f):(X(f!=="object",'The value "object" should be written as "Object"'),l.push(f));if(o.length>0){let f=i.indexOf("object");f!==-1&&(i.splice(i,f,1),o.push("Object"))}if(i.length>0){switch(i.length){case 1:r+=`of type ${i[0]}`;break;case 2:r+=`one of type ${i[0]} or ${i[1]}`;break;default:{let f=i.pop();r+=`one of type ${i.join(", ")}, or ${f}`}}(o.length>0||l.length>0)&&(r+=" or ")}if(o.length>0){switch(o.length){case 1:r+=`an instance of ${o[0]}`;break;case 2:r+=`an instance of ${o[0]} or ${o[1]}`;break;default:{let f=o.pop();r+=`an instance of ${o.join(", ")}, or ${f}`}}l.length>0&&(r+=" or ")}switch(l.length){case 0:break;case 1:l[0].toLowerCase()!==l[0]&&(r+="an "),r+=`${l[0]}`;break;case 2:r+=`one of ${l[0]} or ${l[1]}`;break;default:{let f=l.pop();r+=`one of ${l.join(", ")}, or ${f}`}}if(n==null)r+=`. Received ${n}`;else if(typeof n=="function"&&n.name)r+=`. Received function ${n.name}`;else if(typeof n=="object"){var u;(u=n.constructor)!==null&&u!==void 0&&u.name?r+=`. Received an instance of ${n.constructor.name}`:r+=`. Received ${Re(n,{depth:-1})}`}else{let f=Re(n,{colors:!1});f.length>25&&(f=`${f.slice(0,25)}...`),r+=`. Received type ${typeof n} (${f})`}return r},TypeError);N("ERR_INVALID_ARG_VALUE",(e,t,n="is invalid")=>{let r=Re(t);return r.length>128&&(r=r.slice(0,128)+"..."),`The ${e.includes(".")?"property":"argument"} '${e}' ${n}. 
Received ${r}`},TypeError);N("ERR_INVALID_RETURN_VALUE",(e,t,n)=>{var r;let i=n!=null&&(r=n.constructor)!==null&&r!==void 0&&r.name?`instance of ${n.constructor.name}`:`type ${typeof n}`;return`Expected ${e} to be returned from the "${t}" function but got ${i}.`},TypeError);N("ERR_MISSING_ARGS",(...e)=>{X(e.length>0,"At least one arg needs to be specified");let t,n=e.length;switch(e=(Array.isArray(e)?e:[e]).map(r=>`"${r}"`).join(" or "),n){case 1:t+=`The ${e[0]} argument`;break;case 2:t+=`The ${e[0]} and ${e[1]} arguments`;break;default:{let r=e.pop();t+=`The ${e.join(", ")}, and ${r} arguments`}break}return`${t} must be specified`},TypeError);N("ERR_OUT_OF_RANGE",(e,t,n)=>{X(t,'Missing "range" argument');let r;return Number.isInteger(n)&&Math.abs(n)>2**32?r=Vt(String(n)):typeof n=="bigint"?(r=String(n),(n>2n**32n||n<-(2n**32n))&&(r=Vt(r)),r+="n"):r=Re(n),`The value of "${e}" is out of range. It must be ${t}. Received ${r}`},RangeError);N("ERR_MULTIPLE_CALLBACK","Callback called multiple times",Error);N("ERR_METHOD_NOT_IMPLEMENTED","The %s method is not implemented",Error);N("ERR_STREAM_ALREADY_FINISHED","Cannot call %s after a stream was finished",Error);N("ERR_STREAM_CANNOT_PIPE","Cannot pipe, not readable",Error);N("ERR_STREAM_DESTROYED","Cannot call %s after a stream was destroyed",Error);N("ERR_STREAM_NULL_VALUES","May not write null values to stream",TypeError);N("ERR_STREAM_PREMATURE_CLOSE","Premature close",Error);N("ERR_STREAM_PUSH_AFTER_EOF","stream.push() after EOF",Error);N("ERR_STREAM_UNSHIFT_AFTER_END_EVENT","stream.unshift() after end event",Error);N("ERR_STREAM_WRITE_AFTER_END","write after end",Error);N("ERR_UNKNOWN_ENCODING","Unknown encoding: %s",TypeError);Kt.exports={AbortError:Qe,aggregateTwoErrors:Yt(Pi),hideStackFrames:Yt,codes:Ae}});var _e=g((Xf,nn)=>{"use 
strict";var{ArrayIsArray:Jt,ArrayPrototypeIncludes:Qt,ArrayPrototypeJoin:Zt,ArrayPrototypeMap:ki,NumberIsInteger:et,NumberIsNaN:Wi,NumberMAX_SAFE_INTEGER:Ci,NumberMIN_SAFE_INTEGER:ji,NumberParseInt:$i,ObjectPrototypeHasOwnProperty:vi,RegExpPrototypeExec:Fi,String:Ui,StringPrototypeToUpperCase:Bi,StringPrototypeTrim:Gi}=m(),{hideStackFrames:k,codes:{ERR_SOCKET_BAD_PORT:Hi,ERR_INVALID_ARG_TYPE:q,ERR_INVALID_ARG_VALUE:me,ERR_OUT_OF_RANGE:J,ERR_UNKNOWN_SIGNAL:zt}}=O(),{normalizeEncoding:Vi}=j(),{isAsyncFunction:Yi,isArrayBufferView:Ki}=j().types,Xt={};function zi(e){return e===(e|0)}function Xi(e){return e===e>>>0}var Ji=/^[0-7]+$/,Qi="must be a 32-bit unsigned integer or an octal string";function Zi(e,t,n){if(typeof e>"u"&&(e=n),typeof e=="string"){if(Fi(Ji,e)===null)throw new me(t,e,Qi);e=$i(e,8)}return en(e,t),e}var eo=k((e,t,n=ji,r=Ci)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);if(er)throw new J(t,`>= ${n} && <= ${r}`,e)}),to=k((e,t,n=-2147483648,r=2147483647)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);if(er)throw new J(t,`>= ${n} && <= ${r}`,e)}),en=k((e,t,n=!1)=>{if(typeof e!="number")throw new q(t,"number",e);if(!et(e))throw new J(t,"an integer",e);let r=n?1:0,i=4294967295;if(ei)throw new J(t,`>= ${r} && <= ${i}`,e)});function tn(e,t){if(typeof e!="string")throw new q(t,"string",e)}function no(e,t,n=void 0,r){if(typeof e!="number")throw new q(t,"number",e);if(n!=null&&er||(n!=null||r!=null)&&Wi(e))throw new J(t,`${n!=null?`>= ${n}`:""}${n!=null&&r!=null?" 
&& ":""}${r!=null?`<= ${r}`:""}`,e)}var ro=k((e,t,n)=>{if(!Qt(n,e)){let r=Zt(ki(n,o=>typeof o=="string"?`'${o}'`:Ui(o)),", "),i="must be one of: "+r;throw new me(t,e,i)}});function io(e,t){if(typeof e!="boolean")throw new q(t,"boolean",e)}function Ze(e,t,n){return e==null||!vi(e,t)?n:e[t]}var oo=k((e,t,n=null)=>{let r=Ze(n,"allowArray",!1),i=Ze(n,"allowFunction",!1);if(!Ze(n,"nullable",!1)&&e===null||!r&&Jt(e)||typeof e!="object"&&(!i||typeof e!="function"))throw new q(t,"Object",e)}),lo=k((e,t,n=0)=>{if(!Jt(e))throw new q(t,"Array",e);if(e.length{if(!Ki(e))throw new q(t,["Buffer","TypedArray","DataView"],e)});function uo(e,t){let n=Vi(t),r=e.length;if(n==="hex"&&r%2!==0)throw new me("encoding",t,`is invalid for data of length ${r}`)}function so(e,t="Port",n=!0){if(typeof e!="number"&&typeof e!="string"||typeof e=="string"&&Gi(e).length===0||+e!==+e>>>0||e>65535||e===0&&!n)throw new Hi(t,e,n);return e|0}var co=k((e,t)=>{if(e!==void 0&&(e===null||typeof e!="object"||!("aborted"in e)))throw new q(t,"AbortSignal",e)}),ho=k((e,t)=>{if(typeof e!="function")throw new q(t,"Function",e)}),bo=k((e,t)=>{if(typeof e!="function"||Yi(e))throw new q(t,"Function",e)}),_o=k((e,t)=>{if(e!==void 0)throw new q(t,"undefined",e)});function po(e,t,n){if(!Qt(n,e))throw new q(t,`('${Zt(n,"|")}')`,e)}nn.exports={isInt32:zi,isUint32:Xi,parseFileMode:Zi,validateArray:lo,validateBoolean:io,validateBuffer:fo,validateEncoding:uo,validateFunction:ho,validateInt32:to,validateInteger:eo,validateNumber:no,validateObject:oo,validateOneOf:ro,validatePlainFunction:bo,validatePort:so,validateSignalName:ao,validateString:tn,validateUint32:en,validateUndefined:_o,validateUnion:po,validateAbortSignal:co}});var V=g((Jf,_n)=>{"use strict";var{Symbol:Te,SymbolAsyncIterator:rn,SymbolIterator:on}=m(),ln=Te("kDestroyed"),an=Te("kIsErrored"),tt=Te("kIsReadable"),fn=Te("kIsDisturbed");function Ie(e,t=!1){var n;return!!(e&&typeof e.pipe=="function"&&typeof e.on=="function"&&(!t||typeof e.pause=="function"&&typeof 
e.resume=="function")&&(!e._writableState||((n=e._readableState)===null||n===void 0?void 0:n.readable)!==!1)&&(!e._writableState||e._readableState))}function Me(e){var t;return!!(e&&typeof e.write=="function"&&typeof e.on=="function"&&(!e._readableState||((t=e._writableState)===null||t===void 0?void 0:t.writable)!==!1))}function wo(e){return!!(e&&typeof e.pipe=="function"&&e._readableState&&typeof e.on=="function"&&typeof e.write=="function")}function Q(e){return e&&(e._readableState||e._writableState||typeof e.write=="function"&&typeof e.on=="function"||typeof e.pipe=="function"&&typeof e.on=="function")}function yo(e,t){return e==null?!1:t===!0?typeof e[rn]=="function":t===!1?typeof e[on]=="function":typeof e[rn]=="function"||typeof e[on]=="function"}function Ne(e){if(!Q(e))return null;let t=e._writableState,n=e._readableState,r=t||n;return!!(e.destroyed||e[ln]||r!=null&&r.destroyed)}function un(e){if(!Me(e))return null;if(e.writableEnded===!0)return!0;let t=e._writableState;return t!=null&&t.errored?!1:typeof t?.ended!="boolean"?null:t.ended}function go(e,t){if(!Me(e))return null;if(e.writableFinished===!0)return!0;let n=e._writableState;return n!=null&&n.errored?!1:typeof n?.finished!="boolean"?null:!!(n.finished||t===!1&&n.ended===!0&&n.length===0)}function So(e){if(!Ie(e))return null;if(e.readableEnded===!0)return!0;let t=e._readableState;return!t||t.errored?!1:typeof t?.ended!="boolean"?null:t.ended}function sn(e,t){if(!Ie(e))return null;let n=e._readableState;return n!=null&&n.errored?!1:typeof n?.endEmitted!="boolean"?null:!!(n.endEmitted||t===!1&&n.ended===!0&&n.length===0)}function dn(e){return e&&e[tt]!=null?e[tt]:typeof e?.readable!="boolean"?null:Ne(e)?!1:Ie(e)&&e.readable&&!sn(e)}function cn(e){return typeof e?.writable!="boolean"?null:Ne(e)?!1:Me(e)&&e.writable&&!un(e)}function Eo(e,t){return Q(e)?Ne(e)?!0:!(t?.readable!==!1&&dn(e)||t?.writable!==!1&&cn(e)):null}function Ro(e){var t,n;return 
Q(e)?e.writableErrored?e.writableErrored:(t=(n=e._writableState)===null||n===void 0?void 0:n.errored)!==null&&t!==void 0?t:null:null}function Ao(e){var t,n;return Q(e)?e.readableErrored?e.readableErrored:(t=(n=e._readableState)===null||n===void 0?void 0:n.errored)!==null&&t!==void 0?t:null:null}function mo(e){if(!Q(e))return null;if(typeof e.closed=="boolean")return e.closed;let t=e._writableState,n=e._readableState;return typeof t?.closed=="boolean"||typeof n?.closed=="boolean"?t?.closed||n?.closed:typeof e._closed=="boolean"&&hn(e)?e._closed:null}function hn(e){return typeof e._closed=="boolean"&&typeof e._defaultKeepAlive=="boolean"&&typeof e._removedConnection=="boolean"&&typeof e._removedContLen=="boolean"}function bn(e){return typeof e._sent100=="boolean"&&hn(e)}function To(e){var t;return typeof e._consuming=="boolean"&&typeof e._dumped=="boolean"&&((t=e.req)===null||t===void 0?void 0:t.upgradeOrConnect)===void 0}function Io(e){if(!Q(e))return null;let t=e._writableState,n=e._readableState,r=t||n;return!r&&bn(e)||!!(r&&r.autoDestroy&&r.emitClose&&r.closed===!1)}function Mo(e){var t;return!!(e&&((t=e[fn])!==null&&t!==void 0?t:e.readableDidRead||e.readableAborted))}function No(e){var t,n,r,i,o,l,u,f,a,c;return!!(e&&((t=(n=(r=(i=(o=(l=e[an])!==null&&l!==void 0?l:e.readableErrored)!==null&&o!==void 0?o:e.writableErrored)!==null&&i!==void 0?i:(u=e._readableState)===null||u===void 0?void 0:u.errorEmitted)!==null&&r!==void 0?r:(f=e._writableState)===null||f===void 0?void 0:f.errorEmitted)!==null&&n!==void 0?n:(a=e._readableState)===null||a===void 0?void 0:a.errored)!==null&&t!==void 0?t:(c=e._writableState)===null||c===void 0?void 
0:c.errored))}_n.exports={kDestroyed:ln,isDisturbed:Mo,kIsDisturbed:fn,isErrored:No,kIsErrored:an,isReadable:dn,kIsReadable:tt,isClosed:mo,isDestroyed:Ne,isDuplexNodeStream:wo,isFinished:Eo,isIterable:yo,isReadableNodeStream:Ie,isReadableEnded:So,isReadableFinished:sn,isReadableErrored:Ao,isNodeStream:Q,isWritable:cn,isWritableNodeStream:Me,isWritableEnded:un,isWritableFinished:go,isWritableErrored:Ro,isServerRequest:To,isServerResponse:bn,willEmitClose:Io}});var Y=g((Qf,rt)=>{var oe=__process$,{AbortError:Do,codes:Oo}=O(),{ERR_INVALID_ARG_TYPE:qo,ERR_STREAM_PREMATURE_CLOSE:pn}=Oo,{kEmptyObject:wn,once:yn}=j(),{validateAbortSignal:xo,validateFunction:Lo,validateObject:Po}=_e(),{Promise:ko}=m(),{isClosed:Wo,isReadable:gn,isReadableNodeStream:nt,isReadableFinished:Sn,isReadableErrored:Co,isWritable:En,isWritableNodeStream:Rn,isWritableFinished:An,isWritableErrored:jo,isNodeStream:$o,willEmitClose:vo}=V();function Fo(e){return e.setHeader&&typeof e.abort=="function"}var Uo=()=>{};function mn(e,t,n){var r,i;arguments.length===2?(n=t,t=wn):t==null?t=wn:Po(t,"options"),Lo(n,"callback"),xo(t.signal,"options.signal"),n=yn(n);let o=(r=t.readable)!==null&&r!==void 0?r:nt(e),l=(i=t.writable)!==null&&i!==void 0?i:Rn(e);if(!$o(e))throw new qo("stream","Stream",e);let u=e._writableState,f=e._readableState,a=()=>{e.writable||b()},c=vo(e)&&nt(e)===o&&Rn(e)===l,s=An(e,!1),b=()=>{s=!0,e.destroyed&&(c=!1),!(c&&(!e.readable||o))&&(!o||d)&&n.call(e)},d=Sn(e,!1),h=()=>{d=!0,e.destroyed&&(c=!1),!(c&&(!e.writable||l))&&(!l||s)&&n.call(e)},D=M=>{n.call(e,M)},L=Wo(e),_=()=>{L=!0;let M=jo(e)||Co(e);if(M&&typeof M!="boolean")return n.call(e,M);if(o&&!d&&nt(e,!0)&&!Sn(e,!1))return n.call(e,new pn);if(l&&!s&&!An(e,!1))return n.call(e,new pn);n.call(e)},p=()=>{e.req.on("finish",b)};Fo(e)?(e.on("complete",b),c||e.on("abort",_),e.req?p():e.on("request",p)):l&&!u&&(e.on("end",a),e.on("close",a)),!c&&typeof 
e.aborted=="boolean"&&e.on("aborted",_),e.on("end",h),e.on("finish",b),t.error!==!1&&e.on("error",D),e.on("close",_),L?oe.nextTick(_):u!=null&&u.errorEmitted||f!=null&&f.errorEmitted?c||oe.nextTick(_):(!o&&(!c||gn(e))&&(s||En(e)===!1)||!l&&(!c||En(e))&&(d||gn(e)===!1)||f&&e.req&&e.aborted)&&oe.nextTick(_);let I=()=>{n=Uo,e.removeListener("aborted",_),e.removeListener("complete",b),e.removeListener("abort",_),e.removeListener("request",p),e.req&&e.req.removeListener("finish",b),e.removeListener("end",a),e.removeListener("close",a),e.removeListener("finish",b),e.removeListener("end",h),e.removeListener("error",D),e.removeListener("close",_)};if(t.signal&&!L){let M=()=>{let F=n;I(),F.call(e,new Do(void 0,{cause:t.signal.reason}))};if(t.signal.aborted)oe.nextTick(M);else{let F=n;n=yn((...re)=>{t.signal.removeEventListener("abort",M),F.apply(e,re)}),t.signal.addEventListener("abort",M)}}return I}function Bo(e,t){return new ko((n,r)=>{mn(e,t,i=>{i?r(i):n()})})}rt.exports=mn;rt.exports.finished=Bo});var xn=g((Zf,lt)=>{"use strict";var Nn=AbortController,{codes:{ERR_INVALID_ARG_TYPE:pe,ERR_MISSING_ARGS:Go,ERR_OUT_OF_RANGE:Ho},AbortError:$}=O(),{validateAbortSignal:le,validateInteger:Vo,validateObject:ae}=_e(),Yo=m().Symbol("kWeak"),{finished:Ko}=Y(),{ArrayPrototypePush:zo,MathFloor:Xo,Number:Jo,NumberIsNaN:Qo,Promise:Tn,PromiseReject:In,PromisePrototypeThen:Zo,Symbol:Dn}=m(),De=Dn("kEmpty"),Mn=Dn("kEof");function Oe(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal");let n=1;return t?.concurrency!=null&&(n=Xo(t.concurrency)),Vo(n,"concurrency",1),async function*(){var i,o;let l=new Nn,u=this,f=[],a=l.signal,c={signal:a},s=()=>l.abort();t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted&&s(),t==null||(o=t.signal)===null||o===void 0||o.addEventListener("abort",s);let b,d,h=!1;function D(){h=!0}async function L(){try{for await(let I of u){var _;if(h)return;if(a.aborted)throw 
new $;try{I=e(I,c)}catch(M){I=In(M)}I!==De&&(typeof((_=I)===null||_===void 0?void 0:_.catch)=="function"&&I.catch(D),f.push(I),b&&(b(),b=null),!h&&f.length&&f.length>=n&&await new Tn(M=>{d=M}))}f.push(Mn)}catch(I){let M=In(I);Zo(M,void 0,D),f.push(M)}finally{var p;h=!0,b&&(b(),b=null),t==null||(p=t.signal)===null||p===void 0||p.removeEventListener("abort",s)}}L();try{for(;;){for(;f.length>0;){let _=await f[0];if(_===Mn)return;if(a.aborted)throw new $;_!==De&&(yield _),f.shift(),d&&(d(),d=null)}await new Tn(_=>{b=_})}}finally{l.abort(),h=!0,d&&(d(),d=null)}}.call(this)}function el(e=void 0){return e!=null&&ae(e,"options"),e?.signal!=null&&le(e.signal,"options.signal"),async function*(){let n=0;for await(let i of this){var r;if(e!=null&&(r=e.signal)!==null&&r!==void 0&&r.aborted)throw new $({cause:e.signal.reason});yield[n++,i]}}.call(this)}async function On(e,t=void 0){for await(let n of ot.call(this,e,t))return!0;return!1}async function tl(e,t=void 0){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);return!await On.call(this,async(...n)=>!await e(...n),t)}async function nl(e,t){for await(let n of ot.call(this,e,t))return n}async function rl(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);async function n(r,i){return await e(r,i),De}for await(let r of Oe.call(this,n,t));}function ot(e,t){if(typeof e!="function")throw new pe("fn",["Function","AsyncFunction"],e);async function n(r,i){return await e(r,i)?r:De}return Oe.call(this,n,t)}var it=class extends Go{constructor(){super("reduce"),this.message="Reduce of an empty stream requires an initial value"}};async function il(e,t,n){var r;if(typeof e!="function")throw new pe("reducer",["Function","AsyncFunction"],e);n!=null&&ae(n,"options"),n?.signal!=null&&le(n.signal,"options.signal");let i=arguments.length>1;if(n!=null&&(r=n.signal)!==null&&r!==void 0&&r.aborted){let a=new $(void 0,{cause:n.signal.reason});throw this.once("error",()=>{}),await 
Ko(this.destroy(a)),a}let o=new Nn,l=o.signal;if(n!=null&&n.signal){let a={once:!0,[Yo]:this};n.signal.addEventListener("abort",()=>o.abort(),a)}let u=!1;try{for await(let a of this){var f;if(u=!0,n!=null&&(f=n.signal)!==null&&f!==void 0&&f.aborted)throw new $;i?t=await e(t,a,{signal:l}):(t=a,i=!0)}if(!u&&!i)throw new it}finally{o.abort()}return t}async function ol(e){e!=null&&ae(e,"options"),e?.signal!=null&&le(e.signal,"options.signal");let t=[];for await(let r of this){var n;if(e!=null&&(n=e.signal)!==null&&n!==void 0&&n.aborted)throw new $(void 0,{cause:e.signal.reason});zo(t,r)}return t}function ll(e,t){let n=Oe.call(this,e,t);return async function*(){for await(let i of n)yield*i}.call(this)}function qn(e){if(e=Jo(e),Qo(e))return 0;if(e<0)throw new Ho("number",">= 0",e);return e}function al(e,t=void 0){return t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal"),e=qn(e),async function*(){var r;if(t!=null&&(r=t.signal)!==null&&r!==void 0&&r.aborted)throw new $;for await(let o of this){var i;if(t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted)throw new $;e--<=0&&(yield o)}}.call(this)}function fl(e,t=void 0){return t!=null&&ae(t,"options"),t?.signal!=null&&le(t.signal,"options.signal"),e=qn(e),async function*(){var r;if(t!=null&&(r=t.signal)!==null&&r!==void 0&&r.aborted)throw new $;for await(let o of this){var i;if(t!=null&&(i=t.signal)!==null&&i!==void 0&&i.aborted)throw new $;if(e-- >0)yield o;else return}}.call(this)}lt.exports.streamReturningOperators={asIndexedPairs:el,drop:al,filter:ot,flatMap:ll,map:Oe,take:fl};lt.exports.promiseReturningOperators={every:tl,forEach:rl,reduce:il,toArray:ol,some:On,find:nl}});var Z=g((eu,vn)=>{"use strict";var K=__process$,{aggregateTwoErrors:ul,codes:{ERR_MULTIPLE_CALLBACK:sl},AbortError:dl}=O(),{Symbol:kn}=m(),{kDestroyed:cl,isDestroyed:hl,isFinished:bl,isServerRequest:_l}=V(),Wn=kn("kDestroy"),at=kn("kConstruct");function 
Cn(e,t,n){e&&(e.stack,t&&!t.errored&&(t.errored=e),n&&!n.errored&&(n.errored=e))}function pl(e,t){let n=this._readableState,r=this._writableState,i=r||n;return r&&r.destroyed||n&&n.destroyed?(typeof t=="function"&&t(),this):(Cn(e,r,n),r&&(r.destroyed=!0),n&&(n.destroyed=!0),i.constructed?Ln(this,e,t):this.once(Wn,function(o){Ln(this,ul(o,e),t)}),this)}function Ln(e,t,n){let r=!1;function i(o){if(r)return;r=!0;let l=e._readableState,u=e._writableState;Cn(o,u,l),u&&(u.closed=!0),l&&(l.closed=!0),typeof n=="function"&&n(o),o?K.nextTick(wl,e,o):K.nextTick(jn,e)}try{e._destroy(t||null,i)}catch(o){i(o)}}function wl(e,t){ft(e,t),jn(e)}function jn(e){let t=e._readableState,n=e._writableState;n&&(n.closeEmitted=!0),t&&(t.closeEmitted=!0),(n&&n.emitClose||t&&t.emitClose)&&e.emit("close")}function ft(e,t){let n=e._readableState,r=e._writableState;r&&r.errorEmitted||n&&n.errorEmitted||(r&&(r.errorEmitted=!0),n&&(n.errorEmitted=!0),e.emit("error",t))}function yl(){let e=this._readableState,t=this._writableState;e&&(e.constructed=!0,e.closed=!1,e.closeEmitted=!1,e.destroyed=!1,e.errored=null,e.errorEmitted=!1,e.reading=!1,e.ended=e.readable===!1,e.endEmitted=e.readable===!1),t&&(t.constructed=!0,t.destroyed=!1,t.closed=!1,t.closeEmitted=!1,t.errored=null,t.errorEmitted=!1,t.finalCalled=!1,t.prefinished=!1,t.ended=t.writable===!1,t.ending=t.writable===!1,t.finished=t.writable===!1)}function ut(e,t,n){let r=e._readableState,i=e._writableState;if(i&&i.destroyed||r&&r.destroyed)return this;r&&r.autoDestroy||i&&i.autoDestroy?e.destroy(t):t&&(t.stack,i&&!i.errored&&(i.errored=t),r&&!r.errored&&(r.errored=t),n?K.nextTick(ft,e,t):ft(e,t))}function gl(e,t){if(typeof e._construct!="function")return;let n=e._readableState,r=e._writableState;n&&(n.constructed=!1),r&&(r.constructed=!1),e.once(at,t),!(e.listenerCount(at)>1)&&K.nextTick(Sl,e)}function Sl(e){let t=!1;function n(r){if(t){ut(e,r??new sl);return}t=!0;let 
i=e._readableState,o=e._writableState,l=o||i;i&&(i.constructed=!0),o&&(o.constructed=!0),l.destroyed?e.emit(Wn,r):r?ut(e,r,!0):K.nextTick(El,e)}try{e._construct(n)}catch(r){n(r)}}function El(e){e.emit(at)}function Pn(e){return e&&e.setHeader&&typeof e.abort=="function"}function $n(e){e.emit("close")}function Rl(e,t){e.emit("error",t),K.nextTick($n,e)}function Al(e,t){!e||hl(e)||(!t&&!bl(e)&&(t=new dl),_l(e)?(e.socket=null,e.destroy(t)):Pn(e)?e.abort():Pn(e.req)?e.req.abort():typeof e.destroy=="function"?e.destroy(t):typeof e.close=="function"?e.close():t?K.nextTick(Rl,e,t):K.nextTick($n,e),e.destroyed||(e[cl]=!0))}vn.exports={construct:gl,destroyer:Al,destroy:pl,undestroy:yl,errorOrDestroy:ut}});var Le=g((tu,Un)=>{"use strict";var{ArrayIsArray:ml,ObjectSetPrototypeOf:Fn}=m(),{EventEmitter:qe}=__events$;function xe(e){qe.call(this,e)}Fn(xe.prototype,qe.prototype);Fn(xe,qe);xe.prototype.pipe=function(e,t){let n=this;function r(c){e.writable&&e.write(c)===!1&&n.pause&&n.pause()}n.on("data",r);function i(){n.readable&&n.resume&&n.resume()}e.on("drain",i),!e._isStdio&&(!t||t.end!==!1)&&(n.on("end",l),n.on("close",u));let o=!1;function l(){o||(o=!0,e.end())}function u(){o||(o=!0,typeof e.destroy=="function"&&e.destroy())}function f(c){a(),qe.listenerCount(this,"error")===0&&this.emit("error",c)}st(n,"error",f),st(e,"error",f);function a(){n.removeListener("data",r),e.removeListener("drain",i),n.removeListener("end",l),n.removeListener("close",u),n.removeListener("error",f),e.removeListener("error",f),n.removeListener("end",a),n.removeListener("close",a),e.removeListener("close",a)}return n.on("end",a),n.on("close",a),e.on("close",a),e.emit("pipe",n),e};function st(e,t,n){if(typeof e.prependListener=="function")return e.prependListener(t,n);!e._events||!e._events[t]?e.on(t,n):ml(e._events[t])?e._events[t].unshift(n):e._events[t]=[n,e._events[t]]}Un.exports={Stream:xe,prependListener:st}});var ke=g((nu,Pe)=>{"use 
strict";var{AbortError:Tl,codes:Il}=O(),Ml=Y(),{ERR_INVALID_ARG_TYPE:Bn}=Il,Nl=(e,t)=>{if(typeof e!="object"||!("aborted"in e))throw new Bn(t,"AbortSignal",e)};function Dl(e){return!!(e&&typeof e.pipe=="function")}Pe.exports.addAbortSignal=function(t,n){if(Nl(t,"signal"),!Dl(n))throw new Bn("stream","stream.Stream",n);return Pe.exports.addAbortSignalNoValidate(t,n)};Pe.exports.addAbortSignalNoValidate=function(e,t){if(typeof e!="object"||!("aborted"in e))return t;let n=()=>{t.destroy(new Tl(void 0,{cause:e.reason}))};return e.aborted?n():(e.addEventListener("abort",n),Ml(t,()=>e.removeEventListener("abort",n))),t}});var Vn=g((iu,Hn)=>{"use strict";var{StringPrototypeSlice:Gn,SymbolIterator:Ol,TypedArrayPrototypeSet:We,Uint8Array:ql}=m(),{Buffer:dt}=__buffer$,{inspect:xl}=j();Hn.exports=class{constructor(){this.head=null,this.tail=null,this.length=0}push(t){let n={data:t,next:null};this.length>0?this.tail.next=n:this.head=n,this.tail=n,++this.length}unshift(t){let n={data:t,next:this.head};this.length===0&&(this.tail=n),this.head=n,++this.length}shift(){if(this.length===0)return;let t=this.head.data;return this.length===1?this.head=this.tail=null:this.head=this.head.next,--this.length,t}clear(){this.head=this.tail=null,this.length=0}join(t){if(this.length===0)return"";let n=this.head,r=""+n.data;for(;(n=n.next)!==null;)r+=t+n.data;return r}concat(t){if(this.length===0)return dt.alloc(0);let n=dt.allocUnsafe(t>>>0),r=this.head,i=0;for(;r;)We(n,r.data,i),i+=r.data.length,r=r.next;return n}consume(t,n){let r=this.head.data;if(to.length)n+=o,t-=o.length;else{t===o.length?(n+=o,++i,r.next?this.head=r.next:this.head=this.tail=null):(n+=Gn(o,0,t),this.head=r,r.data=Gn(o,t));break}++i}while((r=r.next)!==null);return this.length-=i,n}_getBuffer(t){let n=dt.allocUnsafe(t),r=t,i=this.head,o=0;do{let l=i.data;if(t>l.length)We(n,l,r-t),t-=l.length;else{t===l.length?(We(n,l,r-t),++o,i.next?this.head=i.next:this.head=this.tail=null):(We(n,new 
ql(l.buffer,l.byteOffset,t),r-t),this.head=i,i.data=l.slice(t));break}++o}while((i=i.next)!==null);return this.length-=o,n}[Symbol.for("nodejs.util.inspect.custom")](t,n){return xl(this,{...n,depth:0,customInspect:!1})}}});var Ce=g((ou,Kn)=>{"use strict";var{MathFloor:Ll,NumberIsInteger:Pl}=m(),{ERR_INVALID_ARG_VALUE:kl}=O().codes;function Wl(e,t,n){return e.highWaterMark!=null?e.highWaterMark:t?e[n]:null}function Yn(e){return e?16:16*1024}function Cl(e,t,n,r){let i=Wl(t,r,n);if(i!=null){if(!Pl(i)||i<0){let o=r?`options.${n}`:"options.highWaterMark";throw new kl(o,i)}return Ll(i)}return Yn(e.objectMode)}Kn.exports={getHighWaterMark:Cl,getDefaultHighWaterMark:Yn}});var ct=g((lu,Qn)=>{"use strict";var zn=__process$,{PromisePrototypeThen:jl,SymbolAsyncIterator:Xn,SymbolIterator:Jn}=m(),{Buffer:$l}=__buffer$,{ERR_INVALID_ARG_TYPE:vl,ERR_STREAM_NULL_VALUES:Fl}=O().codes;function Ul(e,t,n){let r;if(typeof t=="string"||t instanceof $l)return new e({objectMode:!0,...n,read(){this.push(t),this.push(null)}});let i;if(t&&t[Xn])i=!0,r=t[Xn]();else if(t&&t[Jn])i=!1,r=t[Jn]();else throw new vl("iterable",["Iterable"],t);let o=new e({objectMode:!0,highWaterMark:1,...n}),l=!1;o._read=function(){l||(l=!0,f())},o._destroy=function(a,c){jl(u(a),()=>zn.nextTick(c,a),s=>zn.nextTick(c,s||a))};async function u(a){let c=a!=null,s=typeof r.throw=="function";if(c&&s){let{value:b,done:d}=await r.throw(a);if(await b,d)return}if(typeof r.return=="function"){let{value:b}=await r.return();await b}}async function f(){for(;;){try{let{value:a,done:c}=i?await r.next():r.next();if(c)o.push(null);else{let s=a&&typeof a.then=="function"?await a:a;if(s===null)throw l=!1,new Fl;if(o.push(s))continue;l=!1}}catch(a){o.destroy(a)}break}}return o}Qn.exports=Ul});var we=g((au,dr)=>{var 
W=__process$,{ArrayPrototypeIndexOf:Bl,NumberIsInteger:Gl,NumberIsNaN:Hl,NumberParseInt:Vl,ObjectDefineProperties:tr,ObjectKeys:Yl,ObjectSetPrototypeOf:nr,Promise:Kl,SafeSet:zl,SymbolAsyncIterator:Xl,Symbol:Jl}=m();dr.exports=w;w.ReadableState=yt;var{EventEmitter:Ql}=__events$,{Stream:z,prependListener:Zl}=Le(),{Buffer:ht}=__buffer$,{addAbortSignal:ea}=ke(),ta=Y(),y=j().debuglog("stream",e=>{y=e}),na=Vn(),ue=Z(),{getHighWaterMark:ra,getDefaultHighWaterMark:ia}=Ce(),{aggregateTwoErrors:Zn,codes:{ERR_INVALID_ARG_TYPE:oa,ERR_METHOD_NOT_IMPLEMENTED:la,ERR_OUT_OF_RANGE:aa,ERR_STREAM_PUSH_AFTER_EOF:fa,ERR_STREAM_UNSHIFT_AFTER_END_EVENT:ua}}=O(),{validateObject:sa}=_e(),ee=Jl("kPaused"),{StringDecoder:rr}=__string_decoder$,da=ct();nr(w.prototype,z.prototype);nr(w,z);var bt=()=>{},{errorOrDestroy:fe}=ue;function yt(e,t,n){typeof n!="boolean"&&(n=t instanceof v()),this.objectMode=!!(e&&e.objectMode),n&&(this.objectMode=this.objectMode||!!(e&&e.readableObjectMode)),this.highWaterMark=e?ra(this,e,"readableHighWaterMark",n):ia(!1),this.buffer=new na,this.length=0,this.pipes=[],this.flowing=null,this.ended=!1,this.endEmitted=!1,this.reading=!1,this.constructed=!0,this.sync=!0,this.needReadable=!1,this.emittedReadable=!1,this.readableListening=!1,this.resumeScheduled=!1,this[ee]=null,this.errorEmitted=!1,this.emitClose=!e||e.emitClose!==!1,this.autoDestroy=!e||e.autoDestroy!==!1,this.destroyed=!1,this.errored=null,this.closed=!1,this.closeEmitted=!1,this.defaultEncoding=e&&e.defaultEncoding||"utf8",this.awaitDrainWriters=null,this.multiAwaitDrain=!1,this.readingMore=!1,this.dataEmitted=!1,this.decoder=null,this.encoding=null,e&&e.encoding&&(this.decoder=new rr(e.encoding),this.encoding=e.encoding)}function w(e){if(!(this instanceof w))return new w(e);let t=this instanceof v();this._readableState=new yt(e,this,t),e&&(typeof e.read=="function"&&(this._read=e.read),typeof e.destroy=="function"&&(this._destroy=e.destroy),typeof 
e.construct=="function"&&(this._construct=e.construct),e.signal&&!t&&ea(e.signal,this)),z.call(this,e),ue.construct(this,()=>{this._readableState.needReadable&&je(this,this._readableState)})}w.prototype.destroy=ue.destroy;w.prototype._undestroy=ue.undestroy;w.prototype._destroy=function(e,t){t(e)};w.prototype[Ql.captureRejectionSymbol]=function(e){this.destroy(e)};w.prototype.push=function(e,t){return ir(this,e,t,!1)};w.prototype.unshift=function(e,t){return ir(this,e,t,!0)};function ir(e,t,n,r){y("readableAddChunk",t);let i=e._readableState,o;if(i.objectMode||(typeof t=="string"?(n=n||i.defaultEncoding,i.encoding!==n&&(r&&i.encoding?t=ht.from(t,n).toString(i.encoding):(t=ht.from(t,n),n=""))):t instanceof ht?n="":z._isUint8Array(t)?(t=z._uint8ArrayToBuffer(t),n=""):t!=null&&(o=new oa("chunk",["string","Buffer","Uint8Array"],t))),o)fe(e,o);else if(t===null)i.reading=!1,ba(e,i);else if(i.objectMode||t&&t.length>0)if(r)if(i.endEmitted)fe(e,new ua);else{if(i.destroyed||i.errored)return!1;_t(e,i,t,!0)}else if(i.ended)fe(e,new fa);else{if(i.destroyed||i.errored)return!1;i.reading=!1,i.decoder&&!n?(t=i.decoder.write(t),i.objectMode||t.length!==0?_t(e,i,t,!1):je(e,i)):_t(e,i,t,!1)}else r||(i.reading=!1,je(e,i));return!i.ended&&(i.length0?(t.multiAwaitDrain?t.awaitDrainWriters.clear():t.awaitDrainWriters=null,t.dataEmitted=!0,e.emit("data",n)):(t.length+=t.objectMode?1:n.length,r?t.buffer.unshift(n):t.buffer.push(n),t.needReadable&&$e(e)),je(e,t)}w.prototype.isPaused=function(){let e=this._readableState;return e[ee]===!0||e.flowing===!1};w.prototype.setEncoding=function(e){let t=new rr(e);this._readableState.decoder=t,this._readableState.encoding=this._readableState.decoder.encoding;let n=this._readableState.buffer,r="";for(let i of n)r+=t.write(i);return n.clear(),r!==""&&n.push(r),this._readableState.length=r.length,this};var ca=1073741824;function ha(e){if(e>ca)throw new aa("size","<= 1GiB",e);return e--,e|=e>>>1,e|=e>>>2,e|=e>>>4,e|=e>>>8,e|=e>>>16,e++,e}function 
er(e,t){return e<=0||t.length===0&&t.ended?0:t.objectMode?1:Hl(e)?t.flowing&&t.length?t.buffer.first().length:t.length:e<=t.length?e:t.ended?t.length:0}w.prototype.read=function(e){y("read",e),e===void 0?e=NaN:Gl(e)||(e=Vl(e,10));let t=this._readableState,n=e;if(e>t.highWaterMark&&(t.highWaterMark=ha(e)),e!==0&&(t.emittedReadable=!1),e===0&&t.needReadable&&((t.highWaterMark!==0?t.length>=t.highWaterMark:t.length>0)||t.ended))return y("read: emitReadable",t.length,t.ended),t.length===0&&t.ended?pt(this):$e(this),null;if(e=er(e,t),e===0&&t.ended)return t.length===0&&pt(this),null;let r=t.needReadable;if(y("need readable",r),(t.length===0||t.length-e0?i=ur(e,t):i=null,i===null?(t.needReadable=t.length<=t.highWaterMark,e=0):(t.length-=e,t.multiAwaitDrain?t.awaitDrainWriters.clear():t.awaitDrainWriters=null),t.length===0&&(t.ended||(t.needReadable=!0),n!==e&&t.ended&&pt(this)),i!==null&&!t.errorEmitted&&!t.closeEmitted&&(t.dataEmitted=!0,this.emit("data",i)),i};function ba(e,t){if(y("onEofChunk"),!t.ended){if(t.decoder){let n=t.decoder.end();n&&n.length&&(t.buffer.push(n),t.length+=t.objectMode?1:n.length)}t.ended=!0,t.sync?$e(e):(t.needReadable=!1,t.emittedReadable=!0,or(e))}}function $e(e){let t=e._readableState;y("emitReadable",t.needReadable,t.emittedReadable),t.needReadable=!1,t.emittedReadable||(y("emitReadable",t.flowing),t.emittedReadable=!0,W.nextTick(or,e))}function or(e){let t=e._readableState;y("emitReadable_",t.destroyed,t.length,t.ended),!t.destroyed&&!t.errored&&(t.length||t.ended)&&(e.emit("readable"),t.emittedReadable=!1),t.needReadable=!t.flowing&&!t.ended&&t.length<=t.highWaterMark,ar(e)}function je(e,t){!t.readingMore&&t.constructed&&(t.readingMore=!0,W.nextTick(_a,e,t))}function _a(e,t){for(;!t.reading&&!t.ended&&(t.length1&&r.pipes.includes(e)&&(y("false write response, pause",r.awaitDrainWriters.size),r.awaitDrainWriters.add(e)),n.pause()),f||(f=pa(n,e),e.on("drain",f))}n.on("data",b);function b(_){y("ondata");let 
p=e.write(_);y("dest.write",p),p===!1&&s()}function d(_){if(y("onerror",_),L(),e.removeListener("error",d),e.listenerCount("error")===0){let p=e._writableState||e._readableState;p&&!p.errorEmitted?fe(e,_):e.emit("error",_)}}Zl(e,"error",d);function h(){e.removeListener("finish",D),L()}e.once("close",h);function D(){y("onfinish"),e.removeListener("close",h),L()}e.once("finish",D);function L(){y("unpipe"),n.unpipe(e)}return e.emit("pipe",n),e.writableNeedDrain===!0?r.flowing&&s():r.flowing||(y("pipe resume"),n.resume()),e};function pa(e,t){return function(){let r=e._readableState;r.awaitDrainWriters===t?(y("pipeOnDrain",1),r.awaitDrainWriters=null):r.multiAwaitDrain&&(y("pipeOnDrain",r.awaitDrainWriters.size),r.awaitDrainWriters.delete(t)),(!r.awaitDrainWriters||r.awaitDrainWriters.size===0)&&e.listenerCount("data")&&e.resume()}}w.prototype.unpipe=function(e){let t=this._readableState,n={hasUnpiped:!1};if(t.pipes.length===0)return this;if(!e){let i=t.pipes;t.pipes=[],this.pause();for(let o=0;o0,r.flowing!==!1&&this.resume()):e==="readable"&&!r.endEmitted&&!r.readableListening&&(r.readableListening=r.needReadable=!0,r.flowing=!1,r.emittedReadable=!1,y("on readable",r.length,r.reading),r.length?$e(this):r.reading||W.nextTick(wa,this)),n};w.prototype.addListener=w.prototype.on;w.prototype.removeListener=function(e,t){let n=z.prototype.removeListener.call(this,e,t);return e==="readable"&&W.nextTick(lr,this),n};w.prototype.off=w.prototype.removeListener;w.prototype.removeAllListeners=function(e){let t=z.prototype.removeAllListeners.apply(this,arguments);return(e==="readable"||e===void 0)&&W.nextTick(lr,this),t};function lr(e){let t=e._readableState;t.readableListening=e.listenerCount("readable")>0,t.resumeScheduled&&t[ee]===!1?t.flowing=!0:e.listenerCount("data")>0?e.resume():t.readableListening||(t.flowing=null)}function wa(e){y("readable nexttick read 0"),e.read(0)}w.prototype.resume=function(){let e=this._readableState;return 
e.flowing||(y("resume"),e.flowing=!e.readableListening,ya(this,e)),e[ee]=!1,this};function ya(e,t){t.resumeScheduled||(t.resumeScheduled=!0,W.nextTick(ga,e,t))}function ga(e,t){y("resume",t.reading),t.reading||e.read(0),t.resumeScheduled=!1,e.emit("resume"),ar(e),t.flowing&&!t.reading&&e.read(0)}w.prototype.pause=function(){return y("call pause flowing=%j",this._readableState.flowing),this._readableState.flowing!==!1&&(y("pause"),this._readableState.flowing=!1,this.emit("pause")),this._readableState[ee]=!0,this};function ar(e){let t=e._readableState;for(y("flow",t.flowing);t.flowing&&e.read()!==null;);}w.prototype.wrap=function(e){let t=!1;e.on("data",r=>{!this.push(r)&&e.pause&&(t=!0,e.pause())}),e.on("end",()=>{this.push(null)}),e.on("error",r=>{fe(this,r)}),e.on("close",()=>{this.destroy()}),e.on("destroy",()=>{this.destroy()}),this._read=()=>{t&&e.resume&&(t=!1,e.resume())};let n=Yl(e);for(let r=1;r{i=l?Zn(i,l):null,n(),n=bt});try{for(;;){let l=e.destroyed?null:e.read();if(l!==null)yield l;else{if(i)throw i;if(i===null)return;await new Kl(r)}}}catch(l){throw i=Zn(i,l),i}finally{(i||t?.destroyOnReturn!==!1)&&(i===void 0||e._readableState.autoDestroy)?ue.destroyer(e,null):(e.off("readable",r),o())}}tr(w.prototype,{readable:{__proto__:null,get(){let e=this._readableState;return!!e&&e.readable!==!1&&!e.destroyed&&!e.errorEmitted&&!e.endEmitted},set(e){this._readableState&&(this._readableState.readable=!!e)}},readableDidRead:{__proto__:null,enumerable:!1,get:function(){return this._readableState.dataEmitted}},readableAborted:{__proto__:null,enumerable:!1,get:function(){return!!(this._readableState.readable!==!1&&(this._readableState.destroyed||this._readableState.errored)&&!this._readableState.endEmitted)}},readableHighWaterMark:{__proto__:null,enumerable:!1,get:function(){return this._readableState.highWaterMark}},readableBuffer:{__proto__:null,enumerable:!1,get:function(){return 
this._readableState&&this._readableState.buffer}},readableFlowing:{__proto__:null,enumerable:!1,get:function(){return this._readableState.flowing},set:function(e){this._readableState&&(this._readableState.flowing=e)}},readableLength:{__proto__:null,enumerable:!1,get(){return this._readableState.length}},readableObjectMode:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.objectMode:!1}},readableEncoding:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.encoding:null}},errored:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.errored:null}},closed:{__proto__:null,get(){return this._readableState?this._readableState.closed:!1}},destroyed:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.destroyed:!1},set(e){!this._readableState||(this._readableState.destroyed=e)}},readableEnded:{__proto__:null,enumerable:!1,get(){return this._readableState?this._readableState.endEmitted:!1}}});tr(yt.prototype,{pipesCount:{__proto__:null,get(){return this.pipes.length}},paused:{__proto__:null,get(){return this[ee]!==!1},set(e){this[ee]=!!e}}});w._fromList=ur;function ur(e,t){if(t.length===0)return null;let n;return t.objectMode?n=t.buffer.shift():!e||e>=t.length?(t.decoder?n=t.buffer.join(""):t.buffer.length===1?n=t.buffer.first():n=t.buffer.concat(t.length),t.buffer.clear()):n=t.buffer.consume(e,t.decoder),n}function pt(e){let t=e._readableState;y("endReadable",t.endEmitted),t.endEmitted||(t.ended=!0,W.nextTick(Ea,t,e))}function Ea(e,t){if(y("endReadableNT",e.endEmitted,e.length),!e.errored&&!e.closeEmitted&&!e.endEmitted&&e.length===0){if(e.endEmitted=!0,t.emit("end"),t.writable&&t.allowHalfOpen===!1)W.nextTick(Ra,t);else if(e.autoDestroy){let n=t._writableState;(!n||n.autoDestroy&&(n.finished||n.writable===!1))&&t.destroy()}}}function Ra(e){e.writable&&!e.writableEnded&&!e.destroyed&&e.end()}w.from=function(e,t){return da(w,e,t)};var wt;function 
sr(){return wt===void 0&&(wt={}),wt}w.fromWeb=function(e,t){return sr().newStreamReadableFromReadableStream(e,t)};w.toWeb=function(e,t){return sr().newReadableStreamFromStreamReadable(e,t)};w.wrap=function(e,t){var n,r;return new w({objectMode:(n=(r=e.readableObjectMode)!==null&&r!==void 0?r:e.objectMode)!==null&&n!==void 0?n:!0,...t,destroy(i,o){ue.destroyer(e,i),o(i)}}).wrap(e)}});var Tt=g((fu,Ar)=>{var te=__process$,{ArrayPrototypeSlice:br,Error:Aa,FunctionPrototypeSymbolHasInstance:_r,ObjectDefineProperty:pr,ObjectDefineProperties:ma,ObjectSetPrototypeOf:wr,StringPrototypeToLowerCase:Ta,Symbol:Ia,SymbolHasInstance:Ma}=m();Ar.exports=S;S.WritableState=Se;var{EventEmitter:Na}=__events$,ye=Le().Stream,{Buffer:ve}=__buffer$,Be=Z(),{addAbortSignal:Da}=ke(),{getHighWaterMark:Oa,getDefaultHighWaterMark:qa}=Ce(),{ERR_INVALID_ARG_TYPE:xa,ERR_METHOD_NOT_IMPLEMENTED:La,ERR_MULTIPLE_CALLBACK:yr,ERR_STREAM_CANNOT_PIPE:Pa,ERR_STREAM_DESTROYED:ge,ERR_STREAM_ALREADY_FINISHED:ka,ERR_STREAM_NULL_VALUES:Wa,ERR_STREAM_WRITE_AFTER_END:Ca,ERR_UNKNOWN_ENCODING:gr}=O().codes,{errorOrDestroy:se}=Be;wr(S.prototype,ye.prototype);wr(S,ye);function Et(){}var de=Ia("kOnFinished");function Se(e,t,n){typeof n!="boolean"&&(n=t instanceof v()),this.objectMode=!!(e&&e.objectMode),n&&(this.objectMode=this.objectMode||!!(e&&e.writableObjectMode)),this.highWaterMark=e?Oa(this,e,"writableHighWaterMark",n):qa(!1),this.finalCalled=!1,this.needDrain=!1,this.ending=!1,this.ended=!1,this.finished=!1,this.destroyed=!1;let r=!!(e&&e.decodeStrings===!1);this.decodeStrings=!r,this.defaultEncoding=e&&e.defaultEncoding||"utf8",this.length=0,this.writing=!1,this.corked=0,this.sync=!0,this.bufferProcessing=!1,this.onwrite=$a.bind(void 
0,t),this.writecb=null,this.writelen=0,this.afterWriteTickInfo=null,Ue(this),this.pendingcb=0,this.constructed=!0,this.prefinished=!1,this.errorEmitted=!1,this.emitClose=!e||e.emitClose!==!1,this.autoDestroy=!e||e.autoDestroy!==!1,this.errored=null,this.closed=!1,this.closeEmitted=!1,this[de]=[]}function Ue(e){e.buffered=[],e.bufferedIndex=0,e.allBuffers=!0,e.allNoop=!0}Se.prototype.getBuffer=function(){return br(this.buffered,this.bufferedIndex)};pr(Se.prototype,"bufferedRequestCount",{__proto__:null,get(){return this.buffered.length-this.bufferedIndex}});function S(e){let t=this instanceof v();if(!t&&!_r(S,this))return new S(e);this._writableState=new Se(e,this,t),e&&(typeof e.write=="function"&&(this._write=e.write),typeof e.writev=="function"&&(this._writev=e.writev),typeof e.destroy=="function"&&(this._destroy=e.destroy),typeof e.final=="function"&&(this._final=e.final),typeof e.construct=="function"&&(this._construct=e.construct),e.signal&&Da(e.signal,this)),ye.call(this,e),Be.construct(this,()=>{let n=this._writableState;n.writing||At(this,n),mt(this,n)})}pr(S,Ma,{__proto__:null,value:function(e){return _r(this,e)?!0:this!==S?!1:e&&e._writableState instanceof Se}});S.prototype.pipe=function(){se(this,new Pa)};function Sr(e,t,n,r){let i=e._writableState;if(typeof n=="function")r=n,n=i.defaultEncoding;else{if(!n)n=i.defaultEncoding;else if(n!=="buffer"&&!ve.isEncoding(n))throw new gr(n);typeof r!="function"&&(r=Et)}if(t===null)throw new Wa;if(!i.objectMode)if(typeof t=="string")i.decodeStrings!==!1&&(t=ve.from(t,n),n="buffer");else if(t instanceof ve)n="buffer";else if(ye._isUint8Array(t))t=ye._uint8ArrayToBuffer(t),n="buffer";else throw new xa("chunk",["string","Buffer","Uint8Array"],t);let o;return i.ending?o=new Ca:i.destroyed&&(o=new ge("write")),o?(te.nextTick(r,o),se(e,o,!0),o):(i.pendingcb++,ja(e,i,t,n,r))}S.prototype.write=function(e,t,n){return 
Sr(this,e,t,n)===!0};S.prototype.cork=function(){this._writableState.corked++};S.prototype.uncork=function(){let e=this._writableState;e.corked&&(e.corked--,e.writing||At(this,e))};S.prototype.setDefaultEncoding=function(t){if(typeof t=="string"&&(t=Ta(t)),!ve.isEncoding(t))throw new gr(t);return this._writableState.defaultEncoding=t,this};function ja(e,t,n,r,i){let o=t.objectMode?1:n.length;t.length+=o;let l=t.lengthn.bufferedIndex&&At(e,n),r?n.afterWriteTickInfo!==null&&n.afterWriteTickInfo.cb===i?n.afterWriteTickInfo.count++:(n.afterWriteTickInfo={count:1,cb:i,stream:e,state:n},te.nextTick(va,n.afterWriteTickInfo)):Er(e,n,1,i))}function va({stream:e,state:t,count:n,cb:r}){return t.afterWriteTickInfo=null,Er(e,t,n,r)}function Er(e,t,n,r){for(!t.ending&&!e.destroyed&&t.length===0&&t.needDrain&&(t.needDrain=!1,e.emit("drain"));n-- >0;)t.pendingcb--,r();t.destroyed&&Rt(t),mt(e,t)}function Rt(e){if(e.writing)return;for(let i=e.bufferedIndex;i1&&e._writev){t.pendingcb-=o-1;let u=t.allNoop?Et:a=>{for(let c=l;c256?(n.splice(0,l),t.bufferedIndex=0):t.bufferedIndex=l}t.bufferProcessing=!1}S.prototype._write=function(e,t,n){if(this._writev)this._writev([{chunk:e,encoding:t}],n);else throw new La("_write()")};S.prototype._writev=null;S.prototype.end=function(e,t,n){let r=this._writableState;typeof e=="function"?(n=e,e=null,t=null):typeof t=="function"&&(n=t,t=null);let i;if(e!=null){let o=Sr(this,e,t);o instanceof Aa&&(i=o)}return r.corked&&(r.corked=1,this.uncork()),i||(!r.errored&&!r.ending?(r.ending=!0,mt(this,r,!0),r.ended=!0):r.finished?i=new ka("end"):r.destroyed&&(i=new ge("end"))),typeof n=="function"&&(i||r.finished?te.nextTick(n,i):r[de].push(n)),this};function Fe(e){return e.ending&&!e.destroyed&&e.constructed&&e.length===0&&!e.errored&&e.buffered.length===0&&!e.finished&&!e.writing&&!e.errorEmitted&&!e.closeEmitted}function Fa(e,t){let n=!1;function r(i){if(n){se(e,i??yr());return}if(n=!0,t.pendingcb--,i){let o=t[de].splice(0);for(let 
l=0;l{Fe(i)?St(r,i):i.pendingcb--},e,t)):Fe(t)&&(t.pendingcb++,St(e,t))))}function St(e,t){t.pendingcb--,t.finished=!0;let n=t[de].splice(0);for(let r=0;r{var It=__process$,Ga=__buffer$,{isReadable:Ha,isWritable:Va,isIterable:mr,isNodeStream:Ya,isReadableNodeStream:Tr,isWritableNodeStream:Ir,isDuplexNodeStream:Ka}=V(),Mr=Y(),{AbortError:Lr,codes:{ERR_INVALID_ARG_TYPE:za,ERR_INVALID_RETURN_VALUE:Nr}}=O(),{destroyer:ce}=Z(),Xa=v(),Ja=we(),{createDeferredPromise:Dr}=j(),Or=ct(),qr=Blob||Ga.Blob,Qa=typeof qr<"u"?function(t){return t instanceof qr}:function(t){return!1},Za=AbortController,{FunctionPrototypeCall:xr}=m(),ne=class extends Xa{constructor(t){super(t),t?.readable===!1&&(this._readableState.readable=!1,this._readableState.ended=!0,this._readableState.endEmitted=!0),t?.writable===!1&&(this._writableState.writable=!1,this._writableState.ending=!0,this._writableState.ended=!0,this._writableState.finished=!0)}};Pr.exports=function e(t,n){if(Ka(t))return t;if(Tr(t))return Ge({readable:t});if(Ir(t))return Ge({writable:t});if(Ya(t))return Ge({writable:!1,readable:!1});if(typeof t=="function"){let{value:i,write:o,final:l,destroy:u}=ef(t);if(mr(i))return Or(ne,i,{objectMode:!0,write:o,final:l,destroy:u});let f=i?.then;if(typeof f=="function"){let a,c=xr(f,i,s=>{if(s!=null)throw new Nr("nully","body",s)},s=>{ce(a,s)});return a=new ne({objectMode:!0,readable:!1,write:o,final(s){l(async()=>{try{await c,It.nextTick(s,null)}catch(b){It.nextTick(s,b)}})},destroy:u})}throw new Nr("Iterable, AsyncIterable or AsyncFunction",n,i)}if(Qa(t))return e(t.arrayBuffer());if(mr(t))return Or(ne,t,{objectMode:!0,writable:!1});if(typeof t?.writable=="object"||typeof t?.readable=="object"){let i=t!=null&&t.readable?Tr(t?.readable)?t?.readable:e(t.readable):void 0,o=t!=null&&t.writable?Ir(t?.writable)?t?.writable:e(t.writable):void 0;return Ge({readable:i,writable:o})}let r=t?.then;if(typeof r=="function"){let i;return xr(r,t,o=>{o!=null&&i.push(o),i.push(null)},o=>{ce(i,o)}),i=new 
ne({objectMode:!0,writable:!1,read(){}})}throw new za(n,["Blob","ReadableStream","WritableStream","Stream","Iterable","AsyncIterable","Function","{ readable, writable } pair","Promise"],t)};function ef(e){let{promise:t,resolve:n}=Dr(),r=new Za,i=r.signal;return{value:e(async function*(){for(;;){let l=t;t=null;let{chunk:u,done:f,cb:a}=await l;if(It.nextTick(a),f)return;if(i.aborted)throw new Lr(void 0,{cause:i.reason});({promise:t,resolve:n}=Dr()),yield u}}(),{signal:i}),write(l,u,f){let a=n;n=null,a({chunk:l,done:!1,cb:f})},final(l){let u=n;n=null,u({done:!0,cb:l})},destroy(l,u){r.abort(),u(l)}}}function Ge(e){let t=e.readable&&typeof e.readable.read!="function"?Ja.wrap(e.readable):e.readable,n=e.writable,r=!!Ha(t),i=!!Va(n),o,l,u,f,a;function c(s){let b=f;f=null,b?b(s):s?a.destroy(s):!r&&!i&&a.destroy()}return a=new ne({readableObjectMode:!!(t!=null&&t.readableObjectMode),writableObjectMode:!!(n!=null&&n.writableObjectMode),readable:r,writable:i}),i&&(Mr(n,s=>{i=!1,s&&ce(t,s),c(s)}),a._write=function(s,b,d){n.write(s,b)?d():o=d},a._final=function(s){n.end(),l=s},n.on("drain",function(){if(o){let s=o;o=null,s()}}),n.on("finish",function(){if(l){let s=l;l=null,s()}})),r&&(Mr(t,s=>{r=!1,s&&ce(t,s),c(s)}),t.on("readable",function(){if(u){let s=u;u=null,s()}}),t.on("end",function(){a.push(null)}),a._read=function(){for(;;){let s=t.read();if(s===null){u=a._read;return}if(!a.push(s))return}}),a._destroy=function(s,b){!s&&f!==null&&(s=new Lr),u=null,o=null,l=null,f===null?b(s):(f=b,ce(n,s),ce(t,s))},a}});var v=g((su,jr)=>{"use strict";var{ObjectDefineProperties:tf,ObjectGetOwnPropertyDescriptor:B,ObjectKeys:nf,ObjectSetPrototypeOf:Wr}=m();jr.exports=C;var Dt=we(),x=Tt();Wr(C.prototype,Dt.prototype);Wr(C,Dt);{let e=nf(x.prototype);for(let t=0;t{"use strict";var{ObjectSetPrototypeOf:$r,Symbol:rf}=m();vr.exports=G;var{ERR_METHOD_NOT_IMPLEMENTED:of}=O().codes,qt=v(),{getHighWaterMark:lf}=Ce();$r(G.prototype,qt.prototype);$r(G,qt);var Ee=rf("kCallback");function G(e){if(!(this 
instanceof G))return new G(e);let t=e?lf(this,e,"readableHighWaterMark",!0):null;t===0&&(e={...e,highWaterMark:null,readableHighWaterMark:t,writableHighWaterMark:e.writableHighWaterMark||0}),qt.call(this,e),this._readableState.sync=!1,this[Ee]=null,e&&(typeof e.transform=="function"&&(this._transform=e.transform),typeof e.flush=="function"&&(this._flush=e.flush)),this.on("prefinish",af)}function Ot(e){typeof this._flush=="function"&&!this.destroyed?this._flush((t,n)=>{if(t){e?e(t):this.destroy(t);return}n!=null&&this.push(n),this.push(null),e&&e()}):(this.push(null),e&&e())}function af(){this._final!==Ot&&Ot.call(this)}G.prototype._final=Ot;G.prototype._transform=function(e,t,n){throw new of("_transform()")};G.prototype._write=function(e,t,n){let r=this._readableState,i=this._writableState,o=r.length;this._transform(e,t,(l,u)=>{if(l){n(l);return}u!=null&&this.push(u),i.ended||o===r.length||r.length{"use strict";var{ObjectSetPrototypeOf:Fr}=m();Ur.exports=he;var Lt=xt();Fr(he.prototype,Lt.prototype);Fr(he,Lt);function he(e){if(!(this instanceof he))return new he(e);Lt.call(this,e)}he.prototype._transform=function(e,t,n){n(null,e)}});var Ye=g((hu,zr)=>{var He=__process$,{ArrayIsArray:ff,Promise:uf,SymbolAsyncIterator:sf}=m(),Ve=Y(),{once:df}=j(),cf=Z(),Br=v(),{aggregateTwoErrors:hf,codes:{ERR_INVALID_ARG_TYPE:Yr,ERR_INVALID_RETURN_VALUE:kt,ERR_MISSING_ARGS:bf,ERR_STREAM_DESTROYED:_f,ERR_STREAM_PREMATURE_CLOSE:pf},AbortError:wf}=O(),{validateFunction:yf,validateAbortSignal:gf}=_e(),{isIterable:be,isReadable:Wt,isReadableNodeStream:$t,isNodeStream:Gr}=V(),Sf=AbortController,Ct,jt;function Hr(e,t,n){let r=!1;e.on("close",()=>{r=!0});let i=Ve(e,{readable:t,writable:n},o=>{r=!o});return{destroy:o=>{r||(r=!0,cf.destroyer(e,o||new _f("pipe")))},cleanup:i}}function Ef(e){return yf(e[e.length-1],"streams[stream.length - 1]"),e.pop()}function Rf(e){if(be(e))return e;if($t(e))return Af(e);throw new Yr("val",["Readable","Iterable","AsyncIterable"],e)}async 
function*Af(e){jt||(jt=we()),yield*jt.prototype[sf].call(e)}async function Vr(e,t,n,{end:r}){let i,o=null,l=a=>{if(a&&(i=a),o){let c=o;o=null,c()}},u=()=>new uf((a,c)=>{i?c(i):o=()=>{i?c(i):a()}});t.on("drain",l);let f=Ve(t,{readable:!1},l);try{t.writableNeedDrain&&await u();for await(let a of e)t.write(a)||await u();r&&t.end(),await u(),n()}catch(a){n(i!==a?hf(i,a):a)}finally{f(),t.off("drain",l)}}function mf(...e){return Kr(e,df(Ef(e)))}function Kr(e,t,n){if(e.length===1&&ff(e[0])&&(e=e[0]),e.length<2)throw new bf("streams");let r=new Sf,i=r.signal,o=n?.signal,l=[];gf(o,"options.signal");function u(){d(new wf)}o?.addEventListener("abort",u);let f,a,c=[],s=0;function b(_){d(_,--s===0)}function d(_,p){if(_&&(!f||f.code==="ERR_STREAM_PREMATURE_CLOSE")&&(f=_),!(!f&&!p)){for(;c.length;)c.shift()(f);o?.removeEventListener("abort",u),r.abort(),p&&(f||l.forEach(I=>I()),He.nextTick(t,f,a))}}let h;for(let _=0;_0,F=I||n?.end!==!1,re=_===e.length-1;if(Gr(p)){let P=function(U){U&&U.name!=="AbortError"&&U.code!=="ERR_STREAM_PREMATURE_CLOSE"&&b(U)};var L=P;if(F){let{destroy:U,cleanup:ze}=Hr(p,I,M);c.push(U),Wt(p)&&re&&l.push(ze)}p.on("error",P),Wt(p)&&re&&l.push(()=>{p.removeListener("error",P)})}if(_===0)if(typeof p=="function"){if(h=p({signal:i}),!be(h))throw new kt("Iterable, AsyncIterable or Stream","source",h)}else be(p)||$t(p)?h=p:h=Br.from(p);else if(typeof p=="function")if(h=Rf(h),h=p(h,{signal:i}),I){if(!be(h,!0))throw new kt("AsyncIterable",`transform[${_-1}]`,h)}else{var D;Ct||(Ct=Pt());let P=new Ct({objectMode:!0}),U=(D=h)===null||D===void 0?void 0:D.then;if(typeof U=="function")s++,U.call(h,ie=>{a=ie,ie!=null&&P.write(ie),F&&P.end(),He.nextTick(b)},ie=>{P.destroy(ie),He.nextTick(b,ie)});else if(be(h,!0))s++,Vr(h,P,b,{end:F});else throw new kt("AsyncIterable or Promise","destination",h);h=P;let{destroy:ze,cleanup:_i}=Hr(h,!1,!0);c.push(ze),re&&l.push(_i)}else if(Gr(p)){if($t(h)){s+=2;let P=Tf(h,p,b,{end:F});Wt(p)&&re&&l.push(P)}else 
if(be(h))s++,Vr(h,p,b,{end:F});else throw new Yr("val",["Readable","Iterable","AsyncIterable"],h);h=p}else h=Br.from(p)}return(i!=null&&i.aborted||o!=null&&o.aborted)&&He.nextTick(u),h}function Tf(e,t,n,{end:r}){let i=!1;return t.on("close",()=>{i||n(new pf)}),e.pipe(t,{end:r}),r?e.once("end",()=>{i=!0,t.end()}):n(),Ve(e,{readable:!0,writable:!1},o=>{let l=e._readableState;o&&o.code==="ERR_STREAM_PREMATURE_CLOSE"&&l&&l.ended&&!l.errored&&!l.errorEmitted?e.once("end",n).once("error",n):n(o)}),Ve(t,{readable:!1,writable:!0},n)}zr.exports={pipelineImpl:Kr,pipeline:mf}});var ei=g((bu,Zr)=>{"use strict";var{pipeline:If}=Ye(),Ke=v(),{destroyer:Mf}=Z(),{isNodeStream:Nf,isReadable:Xr,isWritable:Jr}=V(),{AbortError:Df,codes:{ERR_INVALID_ARG_VALUE:Qr,ERR_MISSING_ARGS:Of}}=O();Zr.exports=function(...t){if(t.length===0)throw new Of("streams");if(t.length===1)return Ke.from(t[0]);let n=[...t];if(typeof t[0]=="function"&&(t[0]=Ke.from(t[0])),typeof t[t.length-1]=="function"){let d=t.length-1;t[d]=Ke.from(t[d])}for(let d=0;d0&&!Jr(t[d]))throw new Qr(`streams[${d}]`,n[d],"must be writable")}let r,i,o,l,u;function f(d){let h=l;l=null,h?h(d):d?u.destroy(d):!b&&!s&&u.destroy()}let a=t[0],c=If(t,f),s=!!Jr(a),b=!!Xr(c);return u=new Ke({writableObjectMode:!!(a!=null&&a.writableObjectMode),readableObjectMode:!!(c!=null&&c.writableObjectMode),writable:s,readable:b}),s&&(u._write=function(d,h,D){a.write(d,h)?D():r=D},u._final=function(d){a.end(),i=d},a.on("drain",function(){if(r){let d=r;r=null,d()}}),c.on("finish",function(){if(i){let d=i;i=null,d()}})),b&&(c.on("readable",function(){if(o){let d=o;o=null,d()}}),c.on("end",function(){u.push(null)}),u._read=function(){for(;;){let d=c.read();if(d===null){o=u._read;return}if(!u.push(d))return}}),u._destroy=function(d,h){!d&&l!==null&&(d=new Df),o=null,r=null,i=null,l===null?h(d):(l=h,Mf(c,d))},u}});var vt=g((_u,ti)=>{"use 
strict";var{ArrayPrototypePop:qf,Promise:xf}=m(),{isIterable:Lf,isNodeStream:Pf}=V(),{pipelineImpl:kf}=Ye(),{finished:Wf}=Y();function Cf(...e){return new xf((t,n)=>{let r,i,o=e[e.length-1];if(o&&typeof o=="object"&&!Pf(o)&&!Lf(o)){let l=qf(e);r=l.signal,i=l.end}kf(e,(l,u)=>{l?n(l):t(u)},{signal:r,end:i})})}ti.exports={finished:Wf,pipeline:Cf}});var di=g((pu,si)=>{var{Buffer:jf}=__buffer$,{ObjectDefineProperty:H,ObjectKeys:ii,ReflectApply:oi}=m(),{promisify:{custom:li}}=j(),{streamReturningOperators:ni,promiseReturningOperators:ri}=xn(),{codes:{ERR_ILLEGAL_CONSTRUCTOR:ai}}=O(),$f=ei(),{pipeline:fi}=Ye(),{destroyer:vf}=Z(),ui=Y(),Ft=vt(),Ut=V(),R=si.exports=Le().Stream;R.isDisturbed=Ut.isDisturbed;R.isErrored=Ut.isErrored;R.isReadable=Ut.isReadable;R.Readable=we();for(let e of ii(ni)){let n=function(...r){if(new.target)throw ai();return R.Readable.from(oi(t,this,r))};Uf=n;let t=ni[e];H(n,"name",{__proto__:null,value:t.name}),H(n,"length",{__proto__:null,value:t.length}),H(R.Readable.prototype,e,{__proto__:null,value:n,enumerable:!1,configurable:!0,writable:!0})}var Uf;for(let e of ii(ri)){let n=function(...i){if(new.target)throw ai();return oi(t,this,i)};Uf=n;let t=ri[e];H(n,"name",{__proto__:null,value:t.name}),H(n,"length",{__proto__:null,value:t.length}),H(R.Readable.prototype,e,{__proto__:null,value:n,enumerable:!1,configurable:!0,writable:!0})}var Uf;R.Writable=Tt();R.Duplex=v();R.Transform=xt();R.PassThrough=Pt();R.pipeline=fi;var{addAbortSignal:Ff}=ke();R.addAbortSignal=Ff;R.finished=ui;R.destroy=vf;R.compose=$f;H(R,"promises",{__proto__:null,configurable:!0,enumerable:!0,get(){return Ft}});H(fi,li,{__proto__:null,enumerable:!0,get(){return Ft.pipeline}});H(ui,li,{__proto__:null,enumerable:!0,get(){return Ft.finished}});R.Stream=R;R._isUint8Array=function(t){return t instanceof Uint8Array};R._uint8ArrayToBuffer=function(t){return jf.from(t.buffer,t.byteOffset,t.byteLength)}});var ci=g((wu,A)=>{"use strict";var 
T=di(),Bf=vt(),Gf=T.Readable.destroy;A.exports=T.Readable;A.exports._uint8ArrayToBuffer=T._uint8ArrayToBuffer;A.exports._isUint8Array=T._isUint8Array;A.exports.isDisturbed=T.isDisturbed;A.exports.isErrored=T.isErrored;A.exports.isReadable=T.isReadable;A.exports.Readable=T.Readable;A.exports.Writable=T.Writable;A.exports.Duplex=T.Duplex;A.exports.Transform=T.Transform;A.exports.PassThrough=T.PassThrough;A.exports.addAbortSignal=T.addAbortSignal;A.exports.finished=T.finished;A.exports.destroy=T.destroy;A.exports.destroy=Gf;A.exports.pipeline=T.pipeline;A.exports.compose=T.compose;Object.defineProperty(T,"promises",{configurable:!0,enumerable:!0,get(){return Bf}});A.exports.Stream=T.Stream;A.exports.default=A.exports});var bi=Ri(ci()),{_uint8ArrayToBuffer:yu,_isUint8Array:gu,isDisturbed:Su,isErrored:Eu,isReadable:Ru,Readable:Au,Writable:mu,Duplex:Tu,Transform:Iu,PassThrough:Mu,addAbortSignal:Nu,finished:Du,destroy:Ou,pipeline:qu,compose:xu,Stream:Lu}=bi,{default:hi,...Hf}=bi,Pu=hi!==void 0?hi:Hf;export{Tu as Duplex,Mu as PassThrough,Au as Readable,Lu as Stream,Iu as Transform,mu as Writable,gu as _isUint8Array,yu as _uint8ArrayToBuffer,Nu as addAbortSignal,xu as compose,Pu as default,Ou as destroy,Du as finished,Su as isDisturbed,Eu as isErrored,Ru as isReadable,qu as pipeline}; +// generated with +// $ esbuild --bundle --legal-comments=none --target=es2022 --tree-shaking=true --format=esm . +// ... 
then making sure the file uses the existing ext:deno_node stuff instead of bundling it +const __process$ = { nextTick }; +import __buffer$ from "ext:deno_node/buffer.ts"; +import __string_decoder$ from "ext:deno_node/string_decoder.ts"; +import __events$ from "ext:deno_node/events.ts"; + +var __getOwnPropNames = Object.getOwnPropertyNames; +var __commonJS = (cb, mod) => + function __require() { + return mod || + (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), + mod.exports; + }; + +// node_modules/buffer/index.js +var require_buffer = () => { + return __buffer$; +}; + +// lib/ours/errors.js +var require_primordials = __commonJS({ + "lib/ours/primordials.js"(exports2, module2) { + "use strict"; + module2.exports = { + ArrayIsArray(self2) { + return Array.isArray(self2); + }, + ArrayPrototypeIncludes(self2, el) { + return self2.includes(el); + }, + ArrayPrototypeIndexOf(self2, el) { + return self2.indexOf(el); + }, + ArrayPrototypeJoin(self2, sep) { + return self2.join(sep); + }, + ArrayPrototypeMap(self2, fn) { + return self2.map(fn); + }, + ArrayPrototypePop(self2, el) { + return self2.pop(el); + }, + ArrayPrototypePush(self2, el) { + return self2.push(el); + }, + ArrayPrototypeSlice(self2, start, end) { + return self2.slice(start, end); + }, + Error, + FunctionPrototypeCall(fn, thisArgs, ...args) { + return fn.call(thisArgs, ...args); + }, + FunctionPrototypeSymbolHasInstance(self2, instance) { + return Function.prototype[Symbol.hasInstance].call(self2, instance); + }, + MathFloor: Math.floor, + Number, + NumberIsInteger: Number.isInteger, + NumberIsNaN: Number.isNaN, + NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, + NumberParseInt: Number.parseInt, + ObjectDefineProperties(self2, props) { + return Object.defineProperties(self2, props); + }, + ObjectDefineProperty(self2, name, prop) { + return Object.defineProperty(self2, name, prop); + }, + ObjectGetOwnPropertyDescriptor(self2, name) { 
+ return Object.getOwnPropertyDescriptor(self2, name); + }, + ObjectKeys(obj) { + return Object.keys(obj); + }, + ObjectSetPrototypeOf(target, proto) { + return Object.setPrototypeOf(target, proto); + }, + Promise, + PromisePrototypeCatch(self2, fn) { + return self2.catch(fn); + }, + PromisePrototypeThen(self2, thenFn, catchFn) { + return self2.then(thenFn, catchFn); + }, + PromiseReject(err) { + return Promise.reject(err); + }, + ReflectApply: Reflect.apply, + RegExpPrototypeTest(self2, value) { + return self2.test(value); + }, + SafeSet: Set, + String, + StringPrototypeSlice(self2, start, end) { + return self2.slice(start, end); + }, + StringPrototypeToLowerCase(self2) { + return self2.toLowerCase(); + }, + StringPrototypeToUpperCase(self2) { + return self2.toUpperCase(); + }, + StringPrototypeTrim(self2) { + return self2.trim(); + }, + Symbol, + SymbolAsyncIterator: Symbol.asyncIterator, + SymbolHasInstance: Symbol.hasInstance, + SymbolIterator: Symbol.iterator, + TypedArrayPrototypeSet(self2, buf, len) { + return self2.set(buf, len); + }, + Uint8Array, + }; + }, +}); + +// lib/internal/validators.js +var require_validators = __commonJS({ + "lib/internal/validators.js"(exports, module) { + "use strict"; + var { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeMap, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + NumberParseInt, + ObjectPrototypeHasOwnProperty, + RegExpPrototypeExec, + String: String2, + StringPrototypeToUpperCase, + StringPrototypeTrim, + } = require_primordials(); + var signals = {}; + function isInt32(value) { + return value === (value | 0); + } + function isUint32(value) { + return value === value >>> 0; + } + var octalReg = /^[0-7]+$/; + var modeDesc = "must be a 32-bit unsigned integer or an octal string"; + function parseFileMode(value, name, def) { + if (typeof value === "undefined") { + value = def; + } + if (typeof value === "string") { + if 
(RegExpPrototypeExec(octalReg, value) === null) { + throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc); + } + value = NumberParseInt(value, 8); + } + validateUint32(value, name); + return value; + } + var validateInteger = hideStackFrames( + ( + value, + name, + min = NumberMIN_SAFE_INTEGER, + max = NumberMAX_SAFE_INTEGER, + ) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }, + ); + var validateInt32 = hideStackFrames( + (value, name, min = -2147483648, max = 2147483647) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }, + ); + var validateUint32 = hideStackFrames((value, name, positive = false) => { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, "an integer", value); + } + const min = positive ? 1 : 0; + const max = 4294967295; + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + }); + function validateString(value, name) { + if (typeof value !== "string") { + throw new ERR_INVALID_ARG_TYPE(name, "string", value); + } + } + function validateNumber(value, name, min = void 0, max) { + if (typeof value !== "number") { + throw new ERR_INVALID_ARG_TYPE(name, "number", value); + } + if ( + min != null && value < min || max != null && value > max || + (min != null || max != null) && NumberIsNaN(value) + ) { + throw new ERR_OUT_OF_RANGE( + name, + `${min != null ? 
`>= ${min}` : ""}${ + min != null && max != null ? " && " : "" + }${max != null ? `<= ${max}` : ""}`, + value, + ); + } + } + var validateOneOf = hideStackFrames((value, name, oneOf) => { + if (!ArrayPrototypeIncludes(oneOf, value)) { + const allowed = ArrayPrototypeJoin( + ArrayPrototypeMap( + oneOf, + (v) => typeof v === "string" ? `'${v}'` : String2(v), + ), + ", ", + ); + const reason = "must be one of: " + allowed; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } + }); + function validateBoolean(value, name) { + if (typeof value !== "boolean") { + throw new ERR_INVALID_ARG_TYPE(name, "boolean", value); + } + } + function getOwnPropertyValueOrDefault(options, key, defaultValue) { + return options == null || !ObjectPrototypeHasOwnProperty(options, key) + ? defaultValue + : options[key]; + } + var validateObject = hideStackFrames((value, name, options = null) => { + const allowArray = getOwnPropertyValueOrDefault( + options, + "allowArray", + false, + ); + const allowFunction = getOwnPropertyValueOrDefault( + options, + "allowFunction", + false, + ); + const nullable = getOwnPropertyValueOrDefault(options, "nullable", false); + if ( + !nullable && value === null || !allowArray && ArrayIsArray(value) || + typeof value !== "object" && + (!allowFunction || typeof value !== "function") + ) { + throw new ERR_INVALID_ARG_TYPE(name, "Object", value); + } + }); + var validateArray = hideStackFrames((value, name, minLength = 0) => { + if (!ArrayIsArray(value)) { + throw new ERR_INVALID_ARG_TYPE(name, "Array", value); + } + if (value.length < minLength) { + const reason = `must be longer than ${minLength}`; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } + }); + function validateSignalName(signal, name = "signal") { + validateString(signal, name); + if (signals[signal] === void 0) { + if (signals[StringPrototypeToUpperCase(signal)] !== void 0) { + throw new ERR_UNKNOWN_SIGNAL( + signal + " (signals must use all capital letters)", + ); + } + throw new 
ERR_UNKNOWN_SIGNAL(signal); + } + } + var validateBuffer = hideStackFrames((buffer, name = "buffer") => { + if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE(name, [ + "Buffer", + "TypedArray", + "DataView", + ], buffer); + } + }); + function validateEncoding(data, encoding) { + const normalizedEncoding = normalizeEncoding(encoding); + const length = data.length; + if (normalizedEncoding === "hex" && length % 2 !== 0) { + throw new ERR_INVALID_ARG_VALUE( + "encoding", + encoding, + `is invalid for data of length ${length}`, + ); + } + } + function validatePort(port, name = "Port", allowZero = true) { + if ( + typeof port !== "number" && typeof port !== "string" || + typeof port === "string" && StringPrototypeTrim(port).length === 0 || + +port !== +port >>> 0 || port > 65535 || port === 0 && !allowZero + ) { + throw new ERR_SOCKET_BAD_PORT(name, port, allowZero); + } + return port | 0; + } + var validateAbortSignal = hideStackFrames((signal, name) => { + if ( + signal !== void 0 && + (signal === null || typeof signal !== "object" || + !("aborted" in signal)) + ) { + throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal); + } + }); + var validateFunction = hideStackFrames((value, name) => { + if (typeof value !== "function") { + throw new ERR_INVALID_ARG_TYPE(name, "Function", value); + } + }); + var validatePlainFunction = hideStackFrames((value, name) => { + if (typeof value !== "function" || isAsyncFunction(value)) { + throw new ERR_INVALID_ARG_TYPE(name, "Function", value); + } + }); + var validateUndefined = hideStackFrames((value, name) => { + if (value !== void 0) { + throw new ERR_INVALID_ARG_TYPE(name, "undefined", value); + } + }); + function validateUnion(value, name, union) { + if (!ArrayPrototypeIncludes(union, value)) { + throw new ERR_INVALID_ARG_TYPE( + name, + `('${ArrayPrototypeJoin(union, "|")}')`, + value, + ); + } + } + module.exports = { + isInt32, + isUint32, + parseFileMode, + validateArray, + validateBoolean, + 
validateBuffer, + validateEncoding, + validateFunction, + validateInt32, + validateInteger, + validateNumber, + validateObject, + validateOneOf, + validatePlainFunction, + validatePort, + validateSignalName, + validateString, + validateUint32, + validateUndefined, + validateUnion, + validateAbortSignal, + }; + }, +}); + +// node_modules/process/browser.js +var require_browser2 = () => { + return __process$; +}; + +// lib/internal/streams/utils.js +var require_utils = __commonJS({ + "lib/internal/streams/utils.js"(exports, module) { + "use strict"; + var { Symbol: Symbol2, SymbolAsyncIterator, SymbolIterator } = + require_primordials(); + var kDestroyed = Symbol2("kDestroyed"); + var kIsErrored = Symbol2("kIsErrored"); + var kIsReadable = Symbol2("kIsReadable"); + var kIsDisturbed = Symbol2("kIsDisturbed"); + function isReadableNodeStream(obj, strict = false) { + var _obj$_readableState; + return !!(obj && typeof obj.pipe === "function" && + typeof obj.on === "function" && + (!strict || + typeof obj.pause === "function" && + typeof obj.resume === "function") && + (!obj._writableState || + ((_obj$_readableState = obj._readableState) === null || + _obj$_readableState === void 0 + ? void 0 + : _obj$_readableState.readable) !== false) && // Duplex + (!obj._writableState || obj._readableState)); + } + function isWritableNodeStream(obj) { + var _obj$_writableState; + return !!(obj && typeof obj.write === "function" && + typeof obj.on === "function" && + (!obj._readableState || + ((_obj$_writableState = obj._writableState) === null || + _obj$_writableState === void 0 + ? 
void 0 + : _obj$_writableState.writable) !== false)); + } + function isDuplexNodeStream(obj) { + return !!(obj && typeof obj.pipe === "function" && obj._readableState && + typeof obj.on === "function" && typeof obj.write === "function"); + } + function isNodeStream(obj) { + return obj && + (obj._readableState || obj._writableState || + typeof obj.write === "function" && typeof obj.on === "function" || + typeof obj.pipe === "function" && typeof obj.on === "function"); + } + function isIterable(obj, isAsync) { + if (obj == null) { + return false; + } + if (isAsync === true) { + return typeof obj[SymbolAsyncIterator] === "function"; + } + if (isAsync === false) { + return typeof obj[SymbolIterator] === "function"; + } + return typeof obj[SymbolAsyncIterator] === "function" || + typeof obj[SymbolIterator] === "function"; + } + function isDestroyed(stream) { + if (!isNodeStream(stream)) { + return null; + } + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + return !!(stream.destroyed || stream[kDestroyed] || + state !== null && state !== void 0 && state.destroyed); + } + function isWritableEnded(stream) { + if (!isWritableNodeStream(stream)) { + return null; + } + if (stream.writableEnded === true) { + return true; + } + const wState = stream._writableState; + if (wState !== null && wState !== void 0 && wState.errored) { + return false; + } + if ( + typeof (wState === null || wState === void 0 + ? void 0 + : wState.ended) !== "boolean" + ) { + return null; + } + return wState.ended; + } + function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) { + return null; + } + if (stream.writableFinished === true) { + return true; + } + const wState = stream._writableState; + if (wState !== null && wState !== void 0 && wState.errored) { + return false; + } + if ( + typeof (wState === null || wState === void 0 + ? 
void 0 + : wState.finished) !== "boolean" + ) { + return null; + } + return !!(wState.finished || + strict === false && wState.ended === true && wState.length === 0); + } + function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) { + return null; + } + if (stream.readableEnded === true) { + return true; + } + const rState = stream._readableState; + if (!rState || rState.errored) { + return false; + } + if ( + typeof (rState === null || rState === void 0 + ? void 0 + : rState.ended) !== "boolean" + ) { + return null; + } + return rState.ended; + } + function isReadableFinished(stream, strict) { + if (!isReadableNodeStream(stream)) { + return null; + } + const rState = stream._readableState; + if (rState !== null && rState !== void 0 && rState.errored) { + return false; + } + if ( + typeof (rState === null || rState === void 0 + ? void 0 + : rState.endEmitted) !== "boolean" + ) { + return null; + } + return !!(rState.endEmitted || + strict === false && rState.ended === true && rState.length === 0); + } + function isReadable(stream) { + if (stream && stream[kIsReadable] != null) { + return stream[kIsReadable]; + } + if ( + typeof (stream === null || stream === void 0 + ? void 0 + : stream.readable) !== "boolean" + ) { + return null; + } + if (isDestroyed(stream)) { + return false; + } + return isReadableNodeStream(stream) && stream.readable && + !isReadableFinished(stream); + } + function isWritable(stream) { + if ( + typeof (stream === null || stream === void 0 + ? void 0 + : stream.writable) !== "boolean" + ) { + return null; + } + if (isDestroyed(stream)) { + return false; + } + return isWritableNodeStream(stream) && stream.writable && + !isWritableEnded(stream); + } + function isFinished(stream, opts) { + if (!isNodeStream(stream)) { + return null; + } + if (isDestroyed(stream)) { + return true; + } + if ( + (opts === null || opts === void 0 ? 
void 0 : opts.readable) !== false && + isReadable(stream) + ) { + return false; + } + if ( + (opts === null || opts === void 0 ? void 0 : opts.writable) !== false && + isWritable(stream) + ) { + return false; + } + return true; + } + function isWritableErrored(stream) { + var _stream$_writableStat, _stream$_writableStat2; + if (!isNodeStream(stream)) { + return null; + } + if (stream.writableErrored) { + return stream.writableErrored; + } + return (_stream$_writableStat = + (_stream$_writableStat2 = stream._writableState) === null || + _stream$_writableStat2 === void 0 + ? void 0 + : _stream$_writableStat2.errored) !== null && + _stream$_writableStat !== void 0 + ? _stream$_writableStat + : null; + } + function isReadableErrored(stream) { + var _stream$_readableStat, _stream$_readableStat2; + if (!isNodeStream(stream)) { + return null; + } + if (stream.readableErrored) { + return stream.readableErrored; + } + return (_stream$_readableStat = + (_stream$_readableStat2 = stream._readableState) === null || + _stream$_readableStat2 === void 0 + ? void 0 + : _stream$_readableStat2.errored) !== null && + _stream$_readableStat !== void 0 + ? _stream$_readableStat + : null; + } + function isClosed(stream) { + if (!isNodeStream(stream)) { + return null; + } + if (typeof stream.closed === "boolean") { + return stream.closed; + } + const wState = stream._writableState; + const rState = stream._readableState; + if ( + typeof (wState === null || wState === void 0 + ? void 0 + : wState.closed) === "boolean" || + typeof (rState === null || rState === void 0 + ? void 0 + : rState.closed) === "boolean" + ) { + return (wState === null || wState === void 0 + ? void 0 + : wState.closed) || + (rState === null || rState === void 0 ? 
void 0 : rState.closed); + } + if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) { + return stream._closed; + } + return null; + } + function isOutgoingMessage(stream) { + return typeof stream._closed === "boolean" && + typeof stream._defaultKeepAlive === "boolean" && + typeof stream._removedConnection === "boolean" && + typeof stream._removedContLen === "boolean"; + } + function isServerResponse(stream) { + return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream); + } + function isServerRequest(stream) { + var _stream$req; + return typeof stream._consuming === "boolean" && + typeof stream._dumped === "boolean" && + ((_stream$req = stream.req) === null || _stream$req === void 0 + ? void 0 + : _stream$req.upgradeOrConnect) === void 0; + } + function willEmitClose(stream) { + if (!isNodeStream(stream)) { + return null; + } + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + return !state && isServerResponse(stream) || + !!(state && state.autoDestroy && state.emitClose && + state.closed === false); + } + function isDisturbed(stream) { + var _stream$kIsDisturbed; + return !!(stream && + ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && + _stream$kIsDisturbed !== void 0 + ? _stream$kIsDisturbed + : stream.readableDidRead || stream.readableAborted)); + } + function isErrored(stream) { + var _ref, + _ref2, + _ref3, + _ref4, + _ref5, + _stream$kIsErrored, + _stream$_readableStat3, + _stream$_writableStat3, + _stream$_readableStat4, + _stream$_writableStat4; + return !!(stream && + ((_ref = + (_ref2 = + (_ref3 = + (_ref4 = + (_ref5 = + (_stream$kIsErrored = + stream[kIsErrored]) !== null && + _stream$kIsErrored !== void 0 + ? _stream$kIsErrored + : stream.readableErrored) !== null && + _ref5 !== void 0 + ? _ref5 + : stream.writableErrored) !== null && + _ref4 !== void 0 + ? 
_ref4 + : (_stream$_readableStat3 = + stream._readableState) === null || + _stream$_readableStat3 === void 0 + ? void 0 + : _stream$_readableStat3.errorEmitted) !== null && + _ref3 !== void 0 + ? _ref3 + : (_stream$_writableStat3 = stream._writableState) === + null || _stream$_writableStat3 === void 0 + ? void 0 + : _stream$_writableStat3.errorEmitted) !== null && + _ref2 !== void 0 + ? _ref2 + : (_stream$_readableStat4 = stream._readableState) === null || + _stream$_readableStat4 === void 0 + ? void 0 + : _stream$_readableStat4.errored) !== null && _ref !== void 0 + ? _ref + : (_stream$_writableStat4 = stream._writableState) === null || + _stream$_writableStat4 === void 0 + ? void 0 + : _stream$_writableStat4.errored)); + } + module.exports = { + kDestroyed, + isDisturbed, + kIsDisturbed, + isErrored, + kIsErrored, + isReadable, + kIsReadable, + isClosed, + isDestroyed, + isDuplexNodeStream, + isFinished, + isIterable, + isReadableNodeStream, + isReadableEnded, + isReadableFinished, + isReadableErrored, + isNodeStream, + isWritable, + isWritableNodeStream, + isWritableEnded, + isWritableFinished, + isWritableErrored, + isServerRequest, + isServerResponse, + willEmitClose, + }; + }, +}); + +// lib/internal/streams/end-of-stream.js +var require_end_of_stream = __commonJS({ + "lib/internal/streams/end-of-stream.js"(exports, module) { + var process = require_browser2(); + var { validateAbortSignal, validateFunction, validateObject } = + require_validators(); + var { Promise: Promise2 } = require_primordials(); + var { + isClosed, + isReadable, + isReadableNodeStream, + isReadableFinished, + isReadableErrored, + isWritable, + isWritableNodeStream, + isWritableFinished, + isWritableErrored, + isNodeStream, + willEmitClose: _willEmitClose, + } = require_utils(); + function isRequest(stream) { + return stream.setHeader && typeof stream.abort === "function"; + } + var nop = () => { + }; + function eos(stream, options, callback) { + var _options$readable, _options$writable; 
+ if (arguments.length === 2) { + callback = options; + options = kEmptyObject; + } else if (options == null) { + options = kEmptyObject; + } else { + validateObject(options, "options"); + } + validateFunction(callback, "callback"); + validateAbortSignal(options.signal, "options.signal"); + callback = once(callback); + const readable = (_options$readable = options.readable) !== null && + _options$readable !== void 0 + ? _options$readable + : isReadableNodeStream(stream); + const writable = (_options$writable = options.writable) !== null && + _options$writable !== void 0 + ? _options$writable + : isWritableNodeStream(stream); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE("stream", "Stream", stream); + } + const wState = stream._writableState; + const rState = stream._readableState; + const onlegacyfinish = () => { + if (!stream.writable) { + onfinish(); + } + }; + let willEmitClose = _willEmitClose(stream) && + isReadableNodeStream(stream) === readable && + isWritableNodeStream(stream) === writable; + let writableFinished = isWritableFinished(stream, false); + const onfinish = () => { + writableFinished = true; + if (stream.destroyed) { + willEmitClose = false; + } + if (willEmitClose && (!stream.readable || readable)) { + return; + } + if (!readable || readableFinished) { + callback.call(stream); + } + }; + let readableFinished = isReadableFinished(stream, false); + const onend = () => { + readableFinished = true; + if (stream.destroyed) { + willEmitClose = false; + } + if (willEmitClose && (!stream.writable || writable)) { + return; + } + if (!writable || writableFinished) { + callback.call(stream); + } + }; + const onerror = (err) => { + callback.call(stream, err); + }; + let closed = isClosed(stream); + const onclose = () => { + closed = true; + const errored = isWritableErrored(stream) || isReadableErrored(stream); + if (errored && typeof errored !== "boolean") { + return callback.call(stream, errored); + } + if ( + readable && 
!readableFinished && isReadableNodeStream(stream, true) + ) { + if (!isReadableFinished(stream, false)) { + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); + } + } + if (writable && !writableFinished) { + if (!isWritableFinished(stream, false)) { + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()); + } + } + callback.call(stream); + }; + const onrequest = () => { + stream.req.on("finish", onfinish); + }; + if (isRequest(stream)) { + stream.on("complete", onfinish); + if (!willEmitClose) { + stream.on("abort", onclose); + } + if (stream.req) { + onrequest(); + } else { + stream.on("request", onrequest); + } + } else if (writable && !wState) { + stream.on("end", onlegacyfinish); + stream.on("close", onlegacyfinish); + } + if (!willEmitClose && typeof stream.aborted === "boolean") { + stream.on("aborted", onclose); + } + stream.on("end", onend); + stream.on("finish", onfinish); + if (options.error !== false) { + stream.on("error", onerror); + } + stream.on("close", onclose); + if (closed) { + process.nextTick(onclose); + } else if ( + wState !== null && wState !== void 0 && wState.errorEmitted || + rState !== null && rState !== void 0 && rState.errorEmitted + ) { + if (!willEmitClose) { + process.nextTick(onclose); + } + } else if ( + !readable && (!willEmitClose || isReadable(stream)) && + (writableFinished || isWritable(stream) === false) + ) { + process.nextTick(onclose); + } else if ( + !writable && (!willEmitClose || isWritable(stream)) && + (readableFinished || isReadable(stream) === false) + ) { + process.nextTick(onclose); + } else if (rState && stream.req && stream.aborted) { + process.nextTick(onclose); + } + const cleanup = () => { + callback = nop; + stream.removeListener("aborted", onclose); + stream.removeListener("complete", onfinish); + stream.removeListener("abort", onclose); + stream.removeListener("request", onrequest); + if (stream.req) { + stream.req.removeListener("finish", onfinish); + } + 
stream.removeListener("end", onlegacyfinish); + stream.removeListener("close", onlegacyfinish); + stream.removeListener("finish", onfinish); + stream.removeListener("end", onend); + stream.removeListener("error", onerror); + stream.removeListener("close", onclose); + }; + if (options.signal && !closed) { + const abort = () => { + const endCallback = callback; + cleanup(); + endCallback.call( + stream, + new AbortError(void 0, { + cause: options.signal.reason, + }), + ); + }; + if (options.signal.aborted) { + process.nextTick(abort); + } else { + const originalCallback = callback; + callback = once((...args) => { + options.signal.removeEventListener("abort", abort); + originalCallback.apply(stream, args); + }); + options.signal.addEventListener("abort", abort); + } + } + return cleanup; + } + function finished(stream, opts) { + return new Promise2((resolve, reject) => { + eos(stream, opts, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + } + module.exports = eos; + module.exports.finished = finished; + }, +}); + +// lib/internal/streams/operators.js +var require_operators = __commonJS({ + "lib/internal/streams/operators.js"(exports, module) { + "use strict"; + var { validateAbortSignal, validateInteger, validateObject } = + require_validators(); + var kWeakHandler = require_primordials().Symbol("kWeak"); + var { finished } = require_end_of_stream(); + var { + ArrayPrototypePush, + MathFloor, + Number: Number2, + NumberIsNaN, + Promise: Promise2, + PromiseReject, + PromisePrototypeThen, + Symbol: Symbol2, + } = require_primordials(); + var kEmpty = Symbol2("kEmpty"); + var kEof = Symbol2("kEof"); + function map(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + let concurrency = 1; + if ( + (options === null || options === void 0 + ? void 0 + : options.concurrency) != null + ) { + concurrency = MathFloor(options.concurrency); + } + validateInteger(concurrency, "concurrency", 1); + return async function* map2() { + var _options$signal, _options$signal2; + const ac = new AbortController(); + const stream = this; + const queue = []; + const signal = ac.signal; + const signalOpt = { + signal, + }; + const abort = () => ac.abort(); + if ( + options !== null && options !== void 0 && + (_options$signal = options.signal) !== null && + _options$signal !== void 0 && _options$signal.aborted + ) { + abort(); + } + options === null || options === void 0 + ? void 0 + : (_options$signal2 = options.signal) === null || + _options$signal2 === void 0 + ? void 0 + : _options$signal2.addEventListener("abort", abort); + let next; + let resume; + let done = false; + function onDone() { + done = true; + } + async function pump() { + try { + for await (let val of stream) { + var _val; + if (done) { + return; + } + if (signal.aborted) { + throw new AbortError(); + } + try { + val = fn(val, signalOpt); + } catch (err) { + val = PromiseReject(err); + } + if (val === kEmpty) { + continue; + } + if ( + typeof ((_val = val) === null || _val === void 0 + ? void 0 + : _val.catch) === "function" + ) { + val.catch(onDone); + } + queue.push(val); + if (next) { + next(); + next = null; + } + if (!done && queue.length && queue.length >= concurrency) { + await new Promise2((resolve) => { + resume = resolve; + }); + } + } + queue.push(kEof); + } catch (err) { + const val = PromiseReject(err); + PromisePrototypeThen(val, void 0, onDone); + queue.push(val); + } finally { + var _options$signal3; + done = true; + if (next) { + next(); + next = null; + } + options === null || options === void 0 + ? 
void 0 + : (_options$signal3 = options.signal) === null || + _options$signal3 === void 0 + ? void 0 + : _options$signal3.removeEventListener("abort", abort); + } + } + pump(); + try { + while (true) { + while (queue.length > 0) { + const val = await queue[0]; + if (val === kEof) { + return; + } + if (signal.aborted) { + throw new AbortError(); + } + if (val !== kEmpty) { + yield val; + } + queue.shift(); + if (resume) { + resume(); + resume = null; + } + } + await new Promise2((resolve) => { + next = resolve; + }); + } + } finally { + ac.abort(); + done = true; + if (resume) { + resume(); + resume = null; + } + } + }.call(this); + } + function asIndexedPairs(options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + return async function* asIndexedPairs2() { + let index = 0; + for await (const val of this) { + var _options$signal4; + if ( + options !== null && options !== void 0 && + (_options$signal4 = options.signal) !== null && + _options$signal4 !== void 0 && _options$signal4.aborted + ) { + throw new AbortError({ + cause: options.signal.reason, + }); + } + yield [index++, val]; + } + }.call(this); + } + async function some(fn, options = void 0) { + for await (const unused of filter.call(this, fn, options)) { + return true; + } + return false; + } + async function every(fn, options = void 0) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + return !await some.call( + this, + async (...args) => { + return !await fn(...args); + }, + options, + ); + } + async function find(fn, options) { + for await (const result of filter.call(this, fn, options)) { + return result; + } + return void 0; + } + async function forEach(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", 
"AsyncFunction"], fn); + } + async function forEachFn(value, options2) { + await fn(value, options2); + return kEmpty; + } + for await (const unused of map.call(this, forEachFn, options)); + } + function filter(fn, options) { + if (typeof fn !== "function") { + throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + async function filterFn(value, options2) { + if (await fn(value, options2)) { + return value; + } + return kEmpty; + } + return map.call(this, filterFn, options); + } + var ReduceAwareErrMissingArgs = class extends ERR_MISSING_ARGS { + constructor() { + super("reduce"); + this.message = "Reduce of an empty stream requires an initial value"; + } + }; + async function reduce(reducer, initialValue, options) { + var _options$signal5; + if (typeof reducer !== "function") { + throw new ERR_INVALID_ARG_TYPE( + "reducer", + ["Function", "AsyncFunction"], + reducer, + ); + } + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + let hasInitialValue = arguments.length > 1; + if ( + options !== null && options !== void 0 && + (_options$signal5 = options.signal) !== null && + _options$signal5 !== void 0 && _options$signal5.aborted + ) { + const err = new AbortError(void 0, { + cause: options.signal.reason, + }); + this.once("error", () => { + }); + await finished(this.destroy(err)); + throw err; + } + const ac = new AbortController(); + const signal = ac.signal; + if (options !== null && options !== void 0 && options.signal) { + const opts = { + once: true, + [kWeakHandler]: this, + }; + options.signal.addEventListener("abort", () => ac.abort(), opts); + } + let gotAnyItemFromStream = false; + try { + for await (const value of this) { + var _options$signal6; + gotAnyItemFromStream = true; + if ( + options !== null && options !== void 0 && + (_options$signal6 = options.signal) !== null && + _options$signal6 !== void 0 && _options$signal6.aborted + ) { + throw new AbortError(); + } + if (!hasInitialValue) { + initialValue = value; + hasInitialValue = true; + } else { + initialValue = await reducer(initialValue, value, { + signal, + }); + } + } + if (!gotAnyItemFromStream && !hasInitialValue) { + throw new ReduceAwareErrMissingArgs(); + } + } finally { + ac.abort(); + } + return initialValue; + } + async function toArray(options) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + const result = []; + for await (const val of this) { + var _options$signal7; + if ( + options !== null && options !== void 0 && + (_options$signal7 = options.signal) !== null && + _options$signal7 !== void 0 && _options$signal7.aborted + ) { + throw new AbortError(void 0, { + cause: options.signal.reason, + }); + } + ArrayPrototypePush(result, val); + } + return result; + } + function flatMap(fn, options) { + const values = map.call(this, fn, options); + return async function* flatMap2() { + for await (const val of values) { + yield* val; + } + }.call(this); + } + function toIntegerOrInfinity(number) { + number = Number2(number); + if (NumberIsNaN(number)) { + return 0; + } + if (number < 0) { + throw new ERR_OUT_OF_RANGE("number", ">= 0", number); + } + return number; + } + function drop(number, options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + number = toIntegerOrInfinity(number); + return async function* drop2() { + var _options$signal8; + if ( + options !== null && options !== void 0 && + (_options$signal8 = options.signal) !== null && + _options$signal8 !== void 0 && _options$signal8.aborted + ) { + throw new AbortError(); + } + for await (const val of this) { + var _options$signal9; + if ( + options !== null && options !== void 0 && + (_options$signal9 = options.signal) !== null && + _options$signal9 !== void 0 && _options$signal9.aborted + ) { + throw new AbortError(); + } + if (number-- <= 0) { + yield val; + } + } + }.call(this); + } + function take(number, options = void 0) { + if (options != null) { + validateObject(options, "options"); + } + if ( + (options === null || options === void 0 ? 
void 0 : options.signal) != + null + ) { + validateAbortSignal(options.signal, "options.signal"); + } + number = toIntegerOrInfinity(number); + return async function* take2() { + var _options$signal10; + if ( + options !== null && options !== void 0 && + (_options$signal10 = options.signal) !== null && + _options$signal10 !== void 0 && _options$signal10.aborted + ) { + throw new AbortError(); + } + for await (const val of this) { + var _options$signal11; + if ( + options !== null && options !== void 0 && + (_options$signal11 = options.signal) !== null && + _options$signal11 !== void 0 && _options$signal11.aborted + ) { + throw new AbortError(); + } + if (number-- > 0) { + yield val; + } else { + return; + } + } + }.call(this); + } + module.exports.streamReturningOperators = { + asIndexedPairs, + drop, + filter, + flatMap, + map, + take, + }; + module.exports.promiseReturningOperators = { + every, + forEach, + reduce, + toArray, + some, + find, + }; + }, +}); + +// lib/internal/streams/destroy.js +var require_destroy = __commonJS({ + "lib/internal/streams/destroy.js"(exports, module) { + "use strict"; + var process = require_browser2(); + var { Symbol: Symbol2 } = require_primordials(); + var { kDestroyed, isDestroyed, isFinished, isServerRequest } = + require_utils(); + var kDestroy = Symbol2("kDestroy"); + var kConstruct = Symbol2("kConstruct"); + function checkError(err, w, r) { + if (err) { + err.stack; + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + } + } + function destroy(err, cb) { + const r = this._readableState; + const w = this._writableState; + const s = w || r; + if (w && w.destroyed || r && r.destroyed) { + if (typeof cb === "function") { + cb(); + } + return this; + } + checkError(err, w, r); + if (w) { + w.destroyed = true; + } + if (r) { + r.destroyed = true; + } + if (!s.constructed) { + this.once(kDestroy, function (er) { + _destroy(this, aggregateTwoErrors(er, err), cb); + }); + } else { + 
_destroy(this, err, cb); + } + return this; + } + function _destroy(self2, err, cb) { + let called = false; + function onDestroy(err2) { + if (called) { + return; + } + called = true; + const r = self2._readableState; + const w = self2._writableState; + checkError(err2, w, r); + if (w) { + w.closed = true; + } + if (r) { + r.closed = true; + } + if (typeof cb === "function") { + cb(err2); + } + if (err2) { + process.nextTick(emitErrorCloseNT, self2, err2); + } else { + process.nextTick(emitCloseNT, self2); + } + } + try { + self2._destroy(err || null, onDestroy); + } catch (err2) { + onDestroy(err2); + } + } + function emitErrorCloseNT(self2, err) { + emitErrorNT(self2, err); + emitCloseNT(self2); + } + function emitCloseNT(self2) { + const r = self2._readableState; + const w = self2._writableState; + if (w) { + w.closeEmitted = true; + } + if (r) { + r.closeEmitted = true; + } + if (w && w.emitClose || r && r.emitClose) { + self2.emit("close"); + } + } + function emitErrorNT(self2, err) { + const r = self2._readableState; + const w = self2._writableState; + if (w && w.errorEmitted || r && r.errorEmitted) { + return; + } + if (w) { + w.errorEmitted = true; + } + if (r) { + r.errorEmitted = true; + } + self2.emit("error", err); + } + function undestroy() { + const r = this._readableState; + const w = this._writableState; + if (r) { + r.constructed = true; + r.closed = false; + r.closeEmitted = false; + r.destroyed = false; + r.errored = null; + r.errorEmitted = false; + r.reading = false; + r.ended = r.readable === false; + r.endEmitted = r.readable === false; + } + if (w) { + w.constructed = true; + w.destroyed = false; + w.closed = false; + w.closeEmitted = false; + w.errored = null; + w.errorEmitted = false; + w.finalCalled = false; + w.prefinished = false; + w.ended = w.writable === false; + w.ending = w.writable === false; + w.finished = w.writable === false; + } + } + function errorOrDestroy(stream, err, sync) { + const r = stream._readableState; + const w = 
stream._writableState; + if (w && w.destroyed || r && r.destroyed) { + return this; + } + if (r && r.autoDestroy || w && w.autoDestroy) { + stream.destroy(err); + } else if (err) { + err.stack; + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + if (sync) { + process.nextTick(emitErrorNT, stream, err); + } else { + emitErrorNT(stream, err); + } + } + } + function construct(stream, cb) { + if (typeof stream._construct !== "function") { + return; + } + const r = stream._readableState; + const w = stream._writableState; + if (r) { + r.constructed = false; + } + if (w) { + w.constructed = false; + } + stream.once(kConstruct, cb); + if (stream.listenerCount(kConstruct) > 1) { + return; + } + process.nextTick(constructNT, stream); + } + function constructNT(stream) { + let called = false; + function onConstruct(err) { + if (called) { + errorOrDestroy( + stream, + err !== null && err !== void 0 ? err : new ERR_MULTIPLE_CALLBACK(), + ); + return; + } + called = true; + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + if (r) { + r.constructed = true; + } + if (w) { + w.constructed = true; + } + if (s.destroyed) { + stream.emit(kDestroy, err); + } else if (err) { + errorOrDestroy(stream, err, true); + } else { + process.nextTick(emitConstructNT, stream); + } + } + try { + stream._construct(onConstruct); + } catch (err) { + onConstruct(err); + } + } + function emitConstructNT(stream) { + stream.emit(kConstruct); + } + function isRequest(stream) { + return stream && stream.setHeader && typeof stream.abort === "function"; + } + function emitCloseLegacy(stream) { + stream.emit("close"); + } + function emitErrorCloseLegacy(stream, err) { + stream.emit("error", err); + process.nextTick(emitCloseLegacy, stream); + } + function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { + return; + } + if (!err && !isFinished(stream)) { + err = new AbortError(); + } + if 
(isServerRequest(stream)) { + stream.socket = null; + stream.destroy(err); + } else if (isRequest(stream)) { + stream.abort(); + } else if (isRequest(stream.req)) { + stream.req.abort(); + } else if (typeof stream.destroy === "function") { + stream.destroy(err); + } else if (typeof stream.close === "function") { + stream.close(); + } else if (err) { + process.nextTick(emitErrorCloseLegacy, stream, err); + } else { + process.nextTick(emitCloseLegacy, stream); + } + if (!stream.destroyed) { + stream[kDestroyed] = true; + } + } + module.exports = { + construct, + destroyer, + destroy, + undestroy, + errorOrDestroy, + }; + }, +}); + +// node_modules/events/events.js +var require_events = __commonJS({ + "node_modules/events/events.js"(exports, module) { + "use strict"; + var R = typeof Reflect === "object" ? Reflect : null; + var ReflectApply = R && typeof R.apply === "function" + ? R.apply + : function ReflectApply2(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + }; + var ReflectOwnKeys; + if (R && typeof R.ownKeys === "function") { + ReflectOwnKeys = R.ownKeys; + } else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys2(target) { + return Object.getOwnPropertyNames(target).concat( + Object.getOwnPropertySymbols(target), + ); + }; + } else { + ReflectOwnKeys = function ReflectOwnKeys2(target) { + return Object.getOwnPropertyNames(target); + }; + } + function ProcessEmitWarning(warning) { + if (console && console.warn) { + console.warn(warning); + } + } + var NumberIsNaN = Number.isNaN || function NumberIsNaN2(value) { + return value !== value; + }; + function EventEmitter() { + EventEmitter.init.call(this); + } + module.exports = EventEmitter; + module.exports.once = once; + EventEmitter.EventEmitter = EventEmitter; + EventEmitter.prototype._events = void 0; + EventEmitter.prototype._eventsCount = 0; + EventEmitter.prototype._maxListeners = void 0; + var defaultMaxListeners = 10; + function 
checkListener(listener) { + if (typeof listener !== "function") { + throw new TypeError( + 'The "listener" argument must be of type Function. Received type ' + + typeof listener, + ); + } + } + Object.defineProperty(EventEmitter, "defaultMaxListeners", { + enumerable: true, + get: function () { + return defaultMaxListeners; + }, + set: function (arg) { + if (typeof arg !== "number" || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError( + 'The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + + arg + ".", + ); + } + defaultMaxListeners = arg; + }, + }); + EventEmitter.init = function () { + if ( + this._events === void 0 || + this._events === Object.getPrototypeOf(this)._events + ) { + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + } + this._maxListeners = this._maxListeners || void 0; + }; + EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== "number" || n < 0 || NumberIsNaN(n)) { + throw new RangeError( + 'The value of "n" is out of range. It must be a non-negative number. Received ' + + n + ".", + ); + } + this._maxListeners = n; + return this; + }; + function _getMaxListeners(that) { + if (that._maxListeners === void 0) { + return EventEmitter.defaultMaxListeners; + } + return that._maxListeners; + } + EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); + }; + EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) { + args.push(arguments[i]); + } + var doError = type === "error"; + var events = this._events; + if (events !== void 0) { + doError = doError && events.error === void 0; + } else if (!doError) { + return false; + } + if (doError) { + var er; + if (args.length > 0) { + er = args[0]; + } + if (er instanceof Error) { + throw er; + } + var err = new Error( + "Unhandled error." + (er ? 
" (" + er.message + ")" : ""), + ); + err.context = er; + throw err; + } + var handler = events[type]; + if (handler === void 0) { + return false; + } + if (typeof handler === "function") { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) { + ReflectApply(listeners[i], this, args); + } + } + return true; + }; + function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + checkListener(listener); + events = target._events; + if (events === void 0) { + events = target._events = /* @__PURE__ */ Object.create(null); + target._eventsCount = 0; + } else { + if (events.newListener !== void 0) { + target.emit( + "newListener", + type, + listener.listener ? listener.listener : listener, + ); + events = target._events; + } + existing = events[type]; + } + if (existing === void 0) { + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === "function") { + existing = events[type] = prepend + ? [listener, existing] + : [existing, listener]; + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + var w = new Error( + "Possible EventEmitter memory leak detected. " + existing.length + + " " + String(type) + + " listeners added. 
Use emitter.setMaxListeners() to increase limit", + ); + w.name = "MaxListenersExceededWarning"; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + return target; + } + EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); + }; + EventEmitter.prototype.on = EventEmitter.prototype.addListener; + EventEmitter.prototype.prependListener = function prependListener( + type, + listener, + ) { + return _addListener(this, type, listener, true); + }; + function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) { + return this.listener.call(this.target); + } + return this.listener.apply(this.target, arguments); + } + } + function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: void 0, target, type, listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; + } + EventEmitter.prototype.once = function once2(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; + }; + EventEmitter.prototype.prependOnceListener = function prependOnceListener( + type, + listener, + ) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + EventEmitter.prototype.removeListener = function removeListener( + type, + listener, + ) { + var list, events, position, i, originalListener; + checkListener(listener); + events = this._events; + if (events === void 0) { + return this; + } + list = events[type]; + if (list === void 0) { + return this; + } + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) { + this._events = /* @__PURE__ */ Object.create(null); + } else { + delete events[type]; + if (events.removeListener) { + this.emit("removeListener", 
type, list.listener || listener); + } + } + } else if (typeof list !== "function") { + position = -1; + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + if (position < 0) { + return this; + } + if (position === 0) { + list.shift(); + } else { + spliceOne(list, position); + } + if (list.length === 1) { + events[type] = list[0]; + } + if (events.removeListener !== void 0) { + this.emit("removeListener", type, originalListener || listener); + } + } + return this; + }; + EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + EventEmitter.prototype.removeAllListeners = function removeAllListeners( + type, + ) { + var listeners, events, i; + events = this._events; + if (events === void 0) { + return this; + } + if (events.removeListener === void 0) { + if (arguments.length === 0) { + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== void 0) { + if (--this._eventsCount === 0) { + this._events = /* @__PURE__ */ Object.create(null); + } else { + delete events[type]; + } + } + return this; + } + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === "removeListener") { + continue; + } + this.removeAllListeners(key); + } + this.removeAllListeners("removeListener"); + this._events = /* @__PURE__ */ Object.create(null); + this._eventsCount = 0; + return this; + } + listeners = events[type]; + if (typeof listeners === "function") { + this.removeListener(type, listeners); + } else if (listeners !== void 0) { + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + return this; + }; + function _listeners(target, type, unwrap) { + var events = target._events; + if (events === void 0) { + return []; + } + var evlistener = events[type]; + if (evlistener 
=== void 0) { + return []; + } + if (typeof evlistener === "function") { + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + } + return unwrap + ? unwrapListeners(evlistener) + : arrayClone(evlistener, evlistener.length); + } + EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); + }; + EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); + }; + EventEmitter.listenerCount = function (emitter, type) { + if (typeof emitter.listenerCount === "function") { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } + }; + EventEmitter.prototype.listenerCount = listenerCount; + function listenerCount(type) { + var events = this._events; + if (events !== void 0) { + var evlistener = events[type]; + if (typeof evlistener === "function") { + return 1; + } else if (evlistener !== void 0) { + return evlistener.length; + } + } + return 0; + } + EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? 
ReflectOwnKeys(this._events) : []; + }; + function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) { + copy[i] = arr[i]; + } + return copy; + } + function spliceOne(list, index) { + for (; index + 1 < list.length; index++) { + list[index] = list[index + 1]; + } + list.pop(); + } + function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; + } + function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + function resolver() { + if (typeof emitter.removeListener === "function") { + emitter.removeListener("error", errorListener); + } + resolve([].slice.call(arguments)); + } + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== "error") { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); + } + function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === "function") { + eventTargetAgnosticAddListener(emitter, "error", handler, flags); + } + } + function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === "function") { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === "function") { + emitter.addEventListener(name, function wrapListener(arg) { + if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError( + 'The "emitter" argument must be of type EventEmitter. 
Received type ' + + typeof emitter, + ); + } + } + }, +}); + +// lib/internal/streams/legacy.js +var require_legacy = __commonJS({ + "lib/internal/streams/legacy.js"(exports, module) { + "use strict"; + var { ArrayIsArray, ObjectSetPrototypeOf } = require_primordials(); + var { EventEmitter: EE } = require_events(); + function Stream(opts) { + EE.call(this, opts); + } + ObjectSetPrototypeOf(Stream.prototype, EE.prototype); + ObjectSetPrototypeOf(Stream, EE); + Stream.prototype.pipe = function (dest, options) { + const source = this; + function ondata(chunk) { + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause(); + } + } + source.on("data", ondata); + function ondrain() { + if (source.readable && source.resume) { + source.resume(); + } + } + dest.on("drain", ondrain); + if (!dest._isStdio && (!options || options.end !== false)) { + source.on("end", onend); + source.on("close", onclose); + } + let didOnEnd = false; + function onend() { + if (didOnEnd) { + return; + } + didOnEnd = true; + dest.end(); + } + function onclose() { + if (didOnEnd) { + return; + } + didOnEnd = true; + if (typeof dest.destroy === "function") { + dest.destroy(); + } + } + function onerror(er) { + cleanup(); + if (EE.listenerCount(this, "error") === 0) { + this.emit("error", er); + } + } + prependListener(source, "error", onerror); + prependListener(dest, "error", onerror); + function cleanup() { + source.removeListener("data", ondata); + dest.removeListener("drain", ondrain); + source.removeListener("end", onend); + source.removeListener("close", onclose); + source.removeListener("error", onerror); + dest.removeListener("error", onerror); + source.removeListener("end", cleanup); + source.removeListener("close", cleanup); + dest.removeListener("close", cleanup); + } + source.on("end", cleanup); + source.on("close", cleanup); + dest.on("close", cleanup); + dest.emit("pipe", source); + return dest; + }; + function prependListener(emitter, event, fn) { + if 
(typeof emitter.prependListener === "function") { + return emitter.prependListener(event, fn); + } + if (!emitter._events || !emitter._events[event]) { + emitter.on(event, fn); + } else if (ArrayIsArray(emitter._events[event])) { + emitter._events[event].unshift(fn); + } else { + emitter._events[event] = [fn, emitter._events[event]]; + } + } + module.exports = { + Stream, + prependListener, + }; + }, +}); + +// lib/internal/streams/add-abort-signal.js +var require_add_abort_signal = __commonJS({ + "lib/internal/streams/add-abort-signal.js"(exports, module) { + "use strict"; + var eos = require_end_of_stream(); + var validateAbortSignal = (signal, name) => { + if (typeof signal !== "object" || !("aborted" in signal)) { + throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal); + } + }; + function isNodeStream(obj) { + return !!(obj && typeof obj.pipe === "function"); + } + module.exports.addAbortSignal = function addAbortSignal(signal, stream) { + validateAbortSignal(signal, "signal"); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE("stream", "stream.Stream", stream); + } + return module.exports.addAbortSignalNoValidate(signal, stream); + }; + module.exports.addAbortSignalNoValidate = function (signal, stream) { + if (typeof signal !== "object" || !("aborted" in signal)) { + return stream; + } + const onAbort = () => { + stream.destroy( + new AbortError(void 0, { + cause: signal.reason, + }), + ); + }; + if (signal.aborted) { + onAbort(); + } else { + signal.addEventListener("abort", onAbort); + eos(stream, () => signal.removeEventListener("abort", onAbort)); + } + return stream; + }; + }, +}); + +// lib/internal/streams/buffer_list.js +var require_buffer_list = __commonJS({ + "lib/internal/streams/buffer_list.js"(exports, module) { + "use strict"; + var { + StringPrototypeSlice, + SymbolIterator, + TypedArrayPrototypeSet, + Uint8Array: Uint8Array2, + } = require_primordials(); + var { Buffer: Buffer2 } = require_buffer(); + module.exports = 
class BufferList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + push(v) { + const entry = { + data: v, + next: null, + }; + if (this.length > 0) { + this.tail.next = entry; + } else { + this.head = entry; + } + this.tail = entry; + ++this.length; + } + unshift(v) { + const entry = { + data: v, + next: this.head, + }; + if (this.length === 0) { + this.tail = entry; + } + this.head = entry; + ++this.length; + } + shift() { + if (this.length === 0) { + return; + } + const ret = this.head.data; + if (this.length === 1) { + this.head = this.tail = null; + } else { + this.head = this.head.next; + } + --this.length; + return ret; + } + clear() { + this.head = this.tail = null; + this.length = 0; + } + join(s) { + if (this.length === 0) { + return ""; + } + let p = this.head; + let ret = "" + p.data; + while ((p = p.next) !== null) { + ret += s + p.data; + } + return ret; + } + concat(n) { + if (this.length === 0) { + return Buffer2.alloc(0); + } + const ret = Buffer2.allocUnsafe(n >>> 0); + let p = this.head; + let i = 0; + while (p) { + TypedArrayPrototypeSet(ret, p.data, i); + i += p.data.length; + p = p.next; + } + return ret; + } + // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + const data = this.head.data; + if (n < data.length) { + const slice = data.slice(0, n); + this.head.data = data.slice(n); + return slice; + } + if (n === data.length) { + return this.shift(); + } + return hasStrings ? this._getString(n) : this._getBuffer(n); + } + first() { + return this.head.data; + } + *[SymbolIterator]() { + for (let p = this.head; p; p = p.next) { + yield p.data; + } + } + // Consumes a specified amount of characters from the buffered data. 
+ _getString(n) { + let ret = ""; + let p = this.head; + let c = 0; + do { + const str = p.data; + if (n > str.length) { + ret += str; + n -= str.length; + } else { + if (n === str.length) { + ret += str; + ++c; + if (p.next) { + this.head = p.next; + } else { + this.head = this.tail = null; + } + } else { + ret += StringPrototypeSlice(str, 0, n); + this.head = p; + p.data = StringPrototypeSlice(str, n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer2.allocUnsafe(n); + const retLen = n; + let p = this.head; + let c = 0; + do { + const buf = p.data; + if (n > buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + n -= buf.length; + } else { + if (n === buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + ++c; + if (p.next) { + this.head = p.next; + } else { + this.head = this.tail = null; + } + } else { + TypedArrayPrototypeSet( + ret, + new Uint8Array2(buf.buffer, buf.byteOffset, n), + retLen - n, + ); + this.head = p; + p.data = buf.slice(n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + // Make sure the linked list only shows the minimal necessary information. + [Symbol.for("nodejs.util.inspect.custom")](_, options) { + return inspect(this, { + ...options, + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false, + }); + } + }; + }, +}); + +// lib/internal/streams/state.js +var require_state = __commonJS({ + "lib/internal/streams/state.js"(exports, module) { + "use strict"; + var { MathFloor, NumberIsInteger } = require_primordials(); + function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null + ? options.highWaterMark + : isDuplex + ? options[duplexKey] + : null; + } + function getDefaultHighWaterMark(objectMode) { + return objectMode ? 
16 : 16 * 1024; + } + function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!NumberIsInteger(hwm) || hwm < 0) { + const name = isDuplex + ? `options.${duplexKey}` + : "options.highWaterMark"; + throw new ERR_INVALID_ARG_VALUE(name, hwm); + } + return MathFloor(hwm); + } + return getDefaultHighWaterMark(state.objectMode); + } + module.exports = { + getHighWaterMark, + getDefaultHighWaterMark, + }; + }, +}); + +// node_modules/safe-buffer/index.js +var require_safe_buffer = __commonJS({ + "node_modules/safe-buffer/index.js"(exports, module) { + var buffer = require_buffer(); + var Buffer2 = buffer.Buffer; + function copyProps(src, dst) { + for (var key in src) { + dst[key] = src[key]; + } + } + if ( + Buffer2.from && Buffer2.alloc && Buffer2.allocUnsafe && + Buffer2.allocUnsafeSlow + ) { + module.exports = buffer; + } else { + copyProps(buffer, exports); + exports.Buffer = SafeBuffer; + } + function SafeBuffer(arg, encodingOrOffset, length) { + return Buffer2(arg, encodingOrOffset, length); + } + SafeBuffer.prototype = Object.create(Buffer2.prototype); + copyProps(Buffer2, SafeBuffer); + SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === "number") { + throw new TypeError("Argument must not be a number"); + } + return Buffer2(arg, encodingOrOffset, length); + }; + SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== "number") { + throw new TypeError("Argument must be a number"); + } + var buf = Buffer2(size); + if (fill !== void 0) { + if (typeof encoding === "string") { + buf.fill(fill, encoding); + } else { + buf.fill(fill); + } + } else { + buf.fill(0); + } + return buf; + }; + SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== "number") { + throw new TypeError("Argument must be a number"); + } + return Buffer2(size); + }; + SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 
"number") { + throw new TypeError("Argument must be a number"); + } + return buffer.SlowBuffer(size); + }; + }, +}); + +// lib/internal/streams/from.js +var require_from = __commonJS({ + "lib/internal/streams/from.js"(exports, module) { + "use strict"; + var process = require_browser2(); + var { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = + require_primordials(); + var { Buffer: Buffer2 } = require_buffer(); + function from(Readable, iterable, opts) { + let iterator; + if (typeof iterable === "string" || iterable instanceof Buffer2) { + return new Readable({ + objectMode: true, + ...opts, + read() { + this.push(iterable); + this.push(null); + }, + }); + } + let isAsync; + if (iterable && iterable[SymbolAsyncIterator]) { + isAsync = true; + iterator = iterable[SymbolAsyncIterator](); + } else if (iterable && iterable[SymbolIterator]) { + isAsync = false; + iterator = iterable[SymbolIterator](); + } else { + throw new ERR_INVALID_ARG_TYPE("iterable", ["Iterable"], iterable); + } + const readable = new Readable({ + objectMode: true, + highWaterMark: 1, + // TODO(ronag): What options should be allowed? + ...opts, + }); + let reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + readable._destroy = function (error, cb) { + PromisePrototypeThen( + close(error), + () => process.nextTick(cb, error), + // nextTick is here in case cb throws + (e) => process.nextTick(cb, e || error), + ); + }; + async function close(error) { + const hadError = error !== void 0 && error !== null; + const hasThrow = typeof iterator.throw === "function"; + if (hadError && hasThrow) { + const { value, done } = await iterator.throw(error); + await value; + if (done) { + return; + } + } + if (typeof iterator.return === "function") { + const { value } = await iterator.return(); + await value; + } + } + async function next() { + for (;;) { + try { + const { value, done } = isAsync + ? 
await iterator.next() + : iterator.next(); + if (done) { + readable.push(null); + } else { + const res = value && typeof value.then === "function" + ? await value + : value; + if (res === null) { + reading = false; + throw new ERR_STREAM_NULL_VALUES(); + } else if (readable.push(res)) { + continue; + } else { + reading = false; + } + } + } catch (err) { + readable.destroy(err); + } + break; + } + } + return readable; + } + module.exports = from; + }, +}); + +// lib/internal/streams/readable.js +var require_readable = __commonJS({ + "lib/internal/streams/readable.js"(exports, module) { + var process = require_browser2(); + var { + ArrayPrototypeIndexOf, + NumberIsInteger, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperties, + ObjectKeys, + ObjectSetPrototypeOf, + Promise: Promise2, + SafeSet, + SymbolAsyncIterator, + Symbol: Symbol2, + } = require_primordials(); + module.exports = Readable; + Readable.ReadableState = ReadableState; + var { EventEmitter: EE } = require_events(); + var { Stream, prependListener } = require_legacy(); + var { Buffer: Buffer2 } = require_buffer(); + var { addAbortSignal } = require_add_abort_signal(); + var eos = require_end_of_stream(); + var debug = debuglog("stream", (fn) => { + debug = fn; + }); + var BufferList = require_buffer_list(); + var destroyImpl = require_destroy(); + var { getHighWaterMark, getDefaultHighWaterMark } = require_state(); + var { validateObject } = require_validators(); + var kPaused = Symbol2("kPaused"); + var from = require_from(); + ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); + ObjectSetPrototypeOf(Readable, Stream); + var nop = () => { + }; + var { errorOrDestroy } = destroyImpl; + function ReadableState(options, stream, isDuplex) { + if (typeof isDuplex !== "boolean") { + isDuplex = stream instanceof require_duplex(); + } + this.objectMode = !!(options && options.objectMode); + if (isDuplex) { + this.objectMode = this.objectMode || + !!(options && options.readableObjectMode); + } + 
this.highWaterMark = options + ? getHighWaterMark(this, options, "readableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + this.buffer = new BufferList(); + this.length = 0; + this.pipes = []; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + this.constructed = true; + this.sync = true; + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this[kPaused] = null; + this.errorEmitted = false; + this.emitClose = !options || options.emitClose !== false; + this.autoDestroy = !options || options.autoDestroy !== false; + this.destroyed = false; + this.errored = null; + this.closed = false; + this.closeEmitted = false; + this.defaultEncoding = options && options.defaultEncoding || "utf8"; + this.awaitDrainWriters = null; + this.multiAwaitDrain = false; + this.readingMore = false; + this.dataEmitted = false; + this.decoder = null; + this.encoding = null; + if (options && options.encoding) { + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } + } + function Readable(options) { + if (!(this instanceof Readable)) { + return new Readable(options); + } + const isDuplex = this instanceof require_duplex(); + this._readableState = new ReadableState(options, this, isDuplex); + if (options) { + if (typeof options.read === "function") { + this._read = options.read; + } + if (typeof options.destroy === "function") { + this._destroy = options.destroy; + } + if (typeof options.construct === "function") { + this._construct = options.construct; + } + if (options.signal && !isDuplex) { + addAbortSignal(options.signal, this); + } + } + Stream.call(this, options); + destroyImpl.construct(this, () => { + if (this._readableState.needReadable) { + maybeReadMore(this, this._readableState); + } + }); + } + Readable.prototype.destroy = destroyImpl.destroy; + Readable.prototype._undestroy = destroyImpl.undestroy; + 
Readable.prototype._destroy = function (err, cb) { + cb(err); + }; + Readable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); + }; + Readable.prototype.push = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, false); + }; + Readable.prototype.unshift = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, true); + }; + function readableAddChunk(stream, chunk, encoding, addToFront) { + debug("readableAddChunk", chunk); + const state = stream._readableState; + let err; + if (!state.objectMode) { + if (typeof chunk === "string") { + encoding = encoding || state.defaultEncoding; + if (state.encoding !== encoding) { + if (addToFront && state.encoding) { + chunk = Buffer2.from(chunk, encoding).toString(state.encoding); + } else { + chunk = Buffer2.from(chunk, encoding); + encoding = ""; + } + } + } else if (chunk instanceof Buffer2) { + encoding = ""; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = ""; + } else if (chunk != null) { + err = new ERR_INVALID_ARG_TYPE("chunk", [ + "string", + "Buffer", + "Uint8Array", + ], chunk); + } + } + if (err) { + errorOrDestroy(stream, err); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (addToFront) { + if (state.endEmitted) { + errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); + } else if (state.destroyed || state.errored) { + return false; + } else { + addChunk(stream, state, chunk, true); + } + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed || state.errored) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) { + addChunk(stream, state, chunk, false); + } else { + maybeReadMore(stream, state); 
+ } + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + return !state.ended && + (state.length < state.highWaterMark || state.length === 0); + } + function addChunk(stream, state, chunk, addToFront) { + if ( + state.flowing && state.length === 0 && !state.sync && + stream.listenerCount("data") > 0 + ) { + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + state.dataEmitted = true; + stream.emit("data", chunk); + } else { + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) { + state.buffer.unshift(chunk); + } else { + state.buffer.push(chunk); + } + if (state.needReadable) { + emitReadable(stream); + } + } + maybeReadMore(stream, state); + } + Readable.prototype.isPaused = function () { + const state = this._readableState; + return state[kPaused] === true || state.flowing === false; + }; + Readable.prototype.setEncoding = function (enc) { + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + this._readableState.encoding = this._readableState.decoder.encoding; + const buffer = this._readableState.buffer; + let content = ""; + for (const data of buffer) { + content += decoder.write(data); + } + buffer.clear(); + if (content !== "") { + buffer.push(content); + } + this._readableState.length = content.length; + return this; + }; + var MAX_HWM = 1073741824; + function computeNewHighWaterMark(n) { + if (n > MAX_HWM) { + throw new ERR_OUT_OF_RANGE("size", "<= 1GiB", n); + } else { + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; + } + function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) { + return 0; + } + if (state.objectMode) { + return 1; + } + if (NumberIsNaN(n)) { + if (state.flowing && state.length) { + return state.buffer.first().length; + } + return 
state.length; + } + if (n <= state.length) { + return n; + } + return state.ended ? state.length : 0; + } + Readable.prototype.read = function (n) { + debug("read", n); + if (n === void 0) { + n = NaN; + } else if (!NumberIsInteger(n)) { + n = NumberParseInt(n, 10); + } + const state = this._readableState; + const nOrig = n; + if (n > state.highWaterMark) { + state.highWaterMark = computeNewHighWaterMark(n); + } + if (n !== 0) { + state.emittedReadable = false; + } + if ( + n === 0 && state.needReadable && + ((state.highWaterMark !== 0 + ? state.length >= state.highWaterMark + : state.length > 0) || state.ended) + ) { + debug("read: emitReadable", state.length, state.ended); + if (state.length === 0 && state.ended) { + endReadable(this); + } else { + emitReadable(this); + } + return null; + } + n = howMuchToRead(n, state); + if (n === 0 && state.ended) { + if (state.length === 0) { + endReadable(this); + } + return null; + } + let doRead = state.needReadable; + debug("need readable", doRead); + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug("length less than watermark", doRead); + } + if ( + state.ended || state.reading || state.destroyed || state.errored || + !state.constructed + ) { + doRead = false; + debug("reading, ended or constructing", doRead); + } else if (doRead) { + debug("do read"); + state.reading = true; + state.sync = true; + if (state.length === 0) { + state.needReadable = true; + } + try { + this._read(state.highWaterMark); + } catch (err) { + errorOrDestroy(this, err); + } + state.sync = false; + if (!state.reading) { + n = howMuchToRead(nOrig, state); + } + } + let ret; + if (n > 0) { + ret = fromList(n, state); + } else { + ret = null; + } + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + } + if (state.length === 
0) { + if (!state.ended) { + state.needReadable = true; + } + if (nOrig !== n && state.ended) { + endReadable(this); + } + } + if (ret !== null && !state.errorEmitted && !state.closeEmitted) { + state.dataEmitted = true; + this.emit("data", ret); + } + return ret; + }; + function onEofChunk(stream, state) { + debug("onEofChunk"); + if (state.ended) { + return; + } + if (state.decoder) { + const chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + emitReadable(stream); + } else { + state.needReadable = false; + state.emittedReadable = true; + emitReadable_(stream); + } + } + function emitReadable(stream) { + const state = stream._readableState; + debug("emitReadable", state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug("emitReadable", state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } + } + function emitReadable_(stream) { + const state = stream._readableState; + debug("emitReadable_", state.destroyed, state.length, state.ended); + if (!state.destroyed && !state.errored && (state.length || state.ended)) { + stream.emit("readable"); + state.emittedReadable = false; + } + state.needReadable = !state.flowing && !state.ended && + state.length <= state.highWaterMark; + flow(stream); + } + function maybeReadMore(stream, state) { + if (!state.readingMore && state.constructed) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } + } + function maybeReadMore_(stream, state) { + while ( + !state.reading && !state.ended && + (state.length < state.highWaterMark || + state.flowing && state.length === 0) + ) { + const len = state.length; + debug("maybeReadMore read 0"); + stream.read(0); + if (len === state.length) { + break; + } + } + state.readingMore = false; + } + Readable.prototype._read = function 
(n) { + throw new ERR_METHOD_NOT_IMPLEMENTED("_read()"); + }; + Readable.prototype.pipe = function (dest, pipeOpts) { + const src = this; + const state = this._readableState; + if (state.pipes.length === 1) { + if (!state.multiAwaitDrain) { + state.multiAwaitDrain = true; + state.awaitDrainWriters = new SafeSet( + state.awaitDrainWriters ? [state.awaitDrainWriters] : [], + ); + } + } + state.pipes.push(dest); + debug("pipe count=%d opts=%j", state.pipes.length, pipeOpts); + const doEnd = (!pipeOpts || pipeOpts.end !== false) && + dest !== process.stdout && dest !== process.stderr; + const endFn = doEnd ? onend : unpipe; + if (state.endEmitted) { + process.nextTick(endFn); + } else { + src.once("end", endFn); + } + dest.on("unpipe", onunpipe); + function onunpipe(readable, unpipeInfo) { + debug("onunpipe"); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug("onend"); + dest.end(); + } + let ondrain; + let cleanedUp = false; + function cleanup() { + debug("cleanup"); + dest.removeListener("close", onclose); + dest.removeListener("finish", onfinish); + if (ondrain) { + dest.removeListener("drain", ondrain); + } + dest.removeListener("error", onerror); + dest.removeListener("unpipe", onunpipe); + src.removeListener("end", onend); + src.removeListener("end", unpipe); + src.removeListener("data", ondata); + cleanedUp = true; + if ( + ondrain && state.awaitDrainWriters && + (!dest._writableState || dest._writableState.needDrain) + ) { + ondrain(); + } + } + function pause() { + if (!cleanedUp) { + if (state.pipes.length === 1 && state.pipes[0] === dest) { + debug("false write response, pause", 0); + state.awaitDrainWriters = dest; + state.multiAwaitDrain = false; + } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { + debug("false write response, pause", state.awaitDrainWriters.size); + state.awaitDrainWriters.add(dest); + } + src.pause(); + } 
+ if (!ondrain) { + ondrain = pipeOnDrain(src, dest); + dest.on("drain", ondrain); + } + } + src.on("data", ondata); + function ondata(chunk) { + debug("ondata"); + const ret = dest.write(chunk); + debug("dest.write", ret); + if (ret === false) { + pause(); + } + } + function onerror(er) { + debug("onerror", er); + unpipe(); + dest.removeListener("error", onerror); + if (dest.listenerCount("error") === 0) { + const s = dest._writableState || dest._readableState; + if (s && !s.errorEmitted) { + errorOrDestroy(dest, er); + } else { + dest.emit("error", er); + } + } + } + prependListener(dest, "error", onerror); + function onclose() { + dest.removeListener("finish", onfinish); + unpipe(); + } + dest.once("close", onclose); + function onfinish() { + debug("onfinish"); + dest.removeListener("close", onclose); + unpipe(); + } + dest.once("finish", onfinish); + function unpipe() { + debug("unpipe"); + src.unpipe(dest); + } + dest.emit("pipe", src); + if (dest.writableNeedDrain === true) { + if (state.flowing) { + pause(); + } + } else if (!state.flowing) { + debug("pipe resume"); + src.resume(); + } + return dest; + }; + function pipeOnDrain(src, dest) { + return function pipeOnDrainFunctionResult() { + const state = src._readableState; + if (state.awaitDrainWriters === dest) { + debug("pipeOnDrain", 1); + state.awaitDrainWriters = null; + } else if (state.multiAwaitDrain) { + debug("pipeOnDrain", state.awaitDrainWriters.size); + state.awaitDrainWriters.delete(dest); + } + if ( + (!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && + src.listenerCount("data") + ) { + src.resume(); + } + }; + } + Readable.prototype.unpipe = function (dest) { + const state = this._readableState; + const unpipeInfo = { + hasUnpiped: false, + }; + if (state.pipes.length === 0) { + return this; + } + if (!dest) { + const dests = state.pipes; + state.pipes = []; + this.pause(); + for (let i = 0; i < dests.length; i++) { + dests[i].emit("unpipe", this, { + hasUnpiped: false, + }); 
+ } + return this; + } + const index = ArrayPrototypeIndexOf(state.pipes, dest); + if (index === -1) { + return this; + } + state.pipes.splice(index, 1); + if (state.pipes.length === 0) { + this.pause(); + } + dest.emit("unpipe", this, unpipeInfo); + return this; + }; + Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; + if (ev === "data") { + state.readableListening = this.listenerCount("readable") > 0; + if (state.flowing !== false) { + this.resume(); + } + } else if (ev === "readable") { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug("on readable", state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; + }; + Readable.prototype.addListener = Readable.prototype.on; + Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === "readable") { + process.nextTick(updateReadableListening, this); + } + return res; + }; + Readable.prototype.off = Readable.prototype.removeListener; + Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === "readable" || ev === void 0) { + process.nextTick(updateReadableListening, this); + } + return res; + }; + function updateReadableListening(self2) { + const state = self2._readableState; + state.readableListening = self2.listenerCount("readable") > 0; + if (state.resumeScheduled && state[kPaused] === false) { + state.flowing = true; + } else if (self2.listenerCount("data") > 0) { + self2.resume(); + } else if (!state.readableListening) { + state.flowing = null; + } + } + function nReadingNextTick(self2) { + debug("readable nexttick read 0"); + 
self2.read(0); + } + Readable.prototype.resume = function () { + const state = this._readableState; + if (!state.flowing) { + debug("resume"); + state.flowing = !state.readableListening; + resume(this, state); + } + state[kPaused] = false; + return this; + }; + function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } + } + function resume_(stream, state) { + debug("resume", state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit("resume"); + flow(stream); + if (state.flowing && !state.reading) { + stream.read(0); + } + } + Readable.prototype.pause = function () { + debug("call pause flowing=%j", this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug("pause"); + this._readableState.flowing = false; + this.emit("pause"); + } + this._readableState[kPaused] = true; + return this; + }; + function flow(stream) { + const state = stream._readableState; + debug("flow", state.flowing); + while (state.flowing && stream.read() !== null); + } + Readable.prototype.wrap = function (stream) { + let paused = false; + stream.on("data", (chunk) => { + if (!this.push(chunk) && stream.pause) { + paused = true; + stream.pause(); + } + }); + stream.on("end", () => { + this.push(null); + }); + stream.on("error", (err) => { + errorOrDestroy(this, err); + }); + stream.on("close", () => { + this.destroy(); + }); + stream.on("destroy", () => { + this.destroy(); + }); + this._read = () => { + if (paused && stream.resume) { + paused = false; + stream.resume(); + } + }; + const streamKeys = ObjectKeys(stream); + for (let j = 1; j < streamKeys.length; j++) { + const i = streamKeys[j]; + if (this[i] === void 0 && typeof stream[i] === "function") { + this[i] = stream[i].bind(stream); + } + } + return this; + }; + Readable.prototype[SymbolAsyncIterator] = function () { + return streamToAsyncIterator(this); + }; + 
Readable.prototype.iterator = function (options) { + if (options !== void 0) { + validateObject(options, "options"); + } + return streamToAsyncIterator(this, options); + }; + function streamToAsyncIterator(stream, options) { + if (typeof stream.read !== "function") { + stream = Readable.wrap(stream, { + objectMode: true, + }); + } + const iter = createAsyncIterator(stream, options); + iter.stream = stream; + return iter; + } + async function* createAsyncIterator(stream, options) { + let callback = nop; + function next(resolve) { + if (this === stream) { + callback(); + callback = nop; + } else { + callback = resolve; + } + } + stream.on("readable", next); + let error; + const cleanup = eos( + stream, + { + writable: false, + }, + (err) => { + error = err ? aggregateTwoErrors(error, err) : null; + callback(); + callback = nop; + }, + ); + try { + while (true) { + const chunk = stream.destroyed ? null : stream.read(); + if (chunk !== null) { + yield chunk; + } else if (error) { + throw error; + } else if (error === null) { + return; + } else { + await new Promise2(next); + } + } + } catch (err) { + error = aggregateTwoErrors(error, err); + throw error; + } finally { + if ( + (error || + (options === null || options === void 0 + ? 
void 0 + : options.destroyOnReturn) !== false) && + (error === void 0 || stream._readableState.autoDestroy) + ) { + destroyImpl.destroyer(stream, null); + } else { + stream.off("readable", next); + cleanup(); + } + } + } + ObjectDefineProperties(Readable.prototype, { + readable: { + __proto__: null, + get() { + const r = this._readableState; + return !!r && r.readable !== false && !r.destroyed && + !r.errorEmitted && !r.endEmitted; + }, + set(val) { + if (this._readableState) { + this._readableState.readable = !!val; + } + }, + }, + readableDidRead: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.dataEmitted; + }, + }, + readableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!(this._readableState.readable !== false && + (this._readableState.destroyed || this._readableState.errored) && + !this._readableState.endEmitted); + }, + }, + readableHighWaterMark: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + }, + }, + readableBuffer: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState && this._readableState.buffer; + }, + }, + readableFlowing: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.flowing; + }, + set: function (state) { + if (this._readableState) { + this._readableState.flowing = state; + } + }, + }, + readableLength: { + __proto__: null, + enumerable: false, + get() { + return this._readableState.length; + }, + }, + readableObjectMode: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.objectMode : false; + }, + }, + readableEncoding: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.encoding : null; + }, + }, + errored: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? 
this._readableState.errored : null; + }, + }, + closed: { + __proto__: null, + get() { + return this._readableState ? this._readableState.closed : false; + }, + }, + destroyed: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.destroyed : false; + }, + set(value) { + if (!this._readableState) { + return; + } + this._readableState.destroyed = value; + }, + }, + readableEnded: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.endEmitted : false; + }, + }, + }); + ObjectDefineProperties(ReadableState.prototype, { + // Legacy getter for `pipesCount`. + pipesCount: { + __proto__: null, + get() { + return this.pipes.length; + }, + }, + // Legacy property for `paused`. + paused: { + __proto__: null, + get() { + return this[kPaused] !== false; + }, + set(value) { + this[kPaused] = !!value; + }, + }, + }); + Readable._fromList = fromList; + function fromList(n, state) { + if (state.length === 0) { + return null; + } + let ret; + if (state.objectMode) { + ret = state.buffer.shift(); + } else if (!n || n >= state.length) { + if (state.decoder) { + ret = state.buffer.join(""); + } else if (state.buffer.length === 1) { + ret = state.buffer.first(); + } else { + ret = state.buffer.concat(state.length); + } + state.buffer.clear(); + } else { + ret = state.buffer.consume(n, state.decoder); + } + return ret; + } + function endReadable(stream) { + const state = stream._readableState; + debug("endReadable", state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } + } + function endReadableNT(state, stream) { + debug("endReadableNT", state.endEmitted, state.length); + if ( + !state.errored && !state.closeEmitted && !state.endEmitted && + state.length === 0 + ) { + state.endEmitted = true; + stream.emit("end"); + if (stream.writable && stream.allowHalfOpen === false) { + process.nextTick(endWritableNT, stream); + } 
else if (state.autoDestroy) { + const wState = stream._writableState; + const autoDestroy = !wState || wState.autoDestroy && // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. + (wState.finished || wState.writable === false); + if (autoDestroy) { + stream.destroy(); + } + } + } + } + function endWritableNT(stream) { + const writable = stream.writable && !stream.writableEnded && + !stream.destroyed; + if (writable) { + stream.end(); + } + } + Readable.from = function (iterable, opts) { + return from(Readable, iterable, opts); + }; + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Readable.fromWeb = function (readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream( + readableStream, + options, + ); + }; + Readable.toWeb = function (streamReadable, options) { + return lazyWebStreams().newReadableStreamFromStreamReadable( + streamReadable, + options, + ); + }; + Readable.wrap = function (src, options) { + var _ref, _src$readableObjectMo; + return new Readable({ + objectMode: + (_ref = (_src$readableObjectMo = src.readableObjectMode) !== null && + _src$readableObjectMo !== void 0 + ? _src$readableObjectMo + : src.objectMode) !== null && _ref !== void 0 + ? 
_ref + : true, + ...options, + destroy(err, callback) { + destroyImpl.destroyer(src, err); + callback(err); + }, + }).wrap(src); + }; + }, +}); + +// lib/internal/streams/writable.js +var require_writable = __commonJS({ + "lib/internal/streams/writable.js"(exports, module) { + var process = require_browser2(); + var { + ArrayPrototypeSlice, + Error: Error2, + FunctionPrototypeSymbolHasInstance, + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + StringPrototypeToLowerCase, + Symbol: Symbol2, + SymbolHasInstance, + } = require_primordials(); + module.exports = Writable; + Writable.WritableState = WritableState; + var { EventEmitter: EE } = require_events(); + var Stream = require_legacy().Stream; + var { Buffer: Buffer2 } = require_buffer(); + var destroyImpl = require_destroy(); + var { addAbortSignal } = require_add_abort_signal(); + var { getHighWaterMark, getDefaultHighWaterMark } = require_state(); + var { errorOrDestroy } = destroyImpl; + ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); + ObjectSetPrototypeOf(Writable, Stream); + function nop() { + } + var kOnFinished = Symbol2("kOnFinished"); + function WritableState(options, stream, isDuplex) { + if (typeof isDuplex !== "boolean") { + isDuplex = stream instanceof require_duplex(); + } + this.objectMode = !!(options && options.objectMode); + if (isDuplex) { + this.objectMode = this.objectMode || + !!(options && options.writableObjectMode); + } + this.highWaterMark = options + ? 
getHighWaterMark(this, options, "writableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + this.finalCalled = false; + this.needDrain = false; + this.ending = false; + this.ended = false; + this.finished = false; + this.destroyed = false; + const noDecode = !!(options && options.decodeStrings === false); + this.decodeStrings = !noDecode; + this.defaultEncoding = options && options.defaultEncoding || "utf8"; + this.length = 0; + this.writing = false; + this.corked = 0; + this.sync = true; + this.bufferProcessing = false; + this.onwrite = onwrite.bind(void 0, stream); + this.writecb = null; + this.writelen = 0; + this.afterWriteTickInfo = null; + resetBuffer(this); + this.pendingcb = 0; + this.constructed = true; + this.prefinished = false; + this.errorEmitted = false; + this.emitClose = !options || options.emitClose !== false; + this.autoDestroy = !options || options.autoDestroy !== false; + this.errored = null; + this.closed = false; + this.closeEmitted = false; + this[kOnFinished] = []; + } + function resetBuffer(state) { + state.buffered = []; + state.bufferedIndex = 0; + state.allBuffers = true; + state.allNoop = true; + } + WritableState.prototype.getBuffer = function getBuffer() { + return ArrayPrototypeSlice(this.buffered, this.bufferedIndex); + }; + ObjectDefineProperty(WritableState.prototype, "bufferedRequestCount", { + __proto__: null, + get() { + return this.buffered.length - this.bufferedIndex; + }, + }); + function Writable(options) { + const isDuplex = this instanceof require_duplex(); + if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) { + return new Writable(options); + } + this._writableState = new WritableState(options, this, isDuplex); + if (options) { + if (typeof options.write === "function") { + this._write = options.write; + } + if (typeof options.writev === "function") { + this._writev = options.writev; + } + if (typeof options.destroy === "function") { + this._destroy = options.destroy; + } + if (typeof 
options.final === "function") { + this._final = options.final; + } + if (typeof options.construct === "function") { + this._construct = options.construct; + } + if (options.signal) { + addAbortSignal(options.signal, this); + } + } + Stream.call(this, options); + destroyImpl.construct(this, () => { + const state = this._writableState; + if (!state.writing) { + clearBuffer(this, state); + } + finishMaybe(this, state); + }); + } + ObjectDefineProperty(Writable, SymbolHasInstance, { + __proto__: null, + value: function (object) { + if (FunctionPrototypeSymbolHasInstance(this, object)) { + return true; + } + if (this !== Writable) { + return false; + } + return object && object._writableState instanceof WritableState; + }, + }); + Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); + }; + function _write(stream, chunk, encoding, cb) { + const state = stream._writableState; + if (typeof encoding === "function") { + cb = encoding; + encoding = state.defaultEncoding; + } else { + if (!encoding) { + encoding = state.defaultEncoding; + } else if (encoding !== "buffer" && !Buffer2.isEncoding(encoding)) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + if (typeof cb !== "function") { + cb = nop; + } + } + if (chunk === null) { + throw new ERR_STREAM_NULL_VALUES(); + } else if (!state.objectMode) { + if (typeof chunk === "string") { + if (state.decodeStrings !== false) { + chunk = Buffer2.from(chunk, encoding); + encoding = "buffer"; + } + } else if (chunk instanceof Buffer2) { + encoding = "buffer"; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = "buffer"; + } else { + throw new ERR_INVALID_ARG_TYPE("chunk", [ + "string", + "Buffer", + "Uint8Array", + ], chunk); + } + } + let err; + if (state.ending) { + err = new ERR_STREAM_WRITE_AFTER_END(); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED("write"); + } + if (err) { + process.nextTick(cb, err); + 
errorOrDestroy(stream, err, true); + return err; + } + state.pendingcb++; + return writeOrBuffer(stream, state, chunk, encoding, cb); + } + Writable.prototype.write = function (chunk, encoding, cb) { + return _write(this, chunk, encoding, cb) === true; + }; + Writable.prototype.cork = function () { + this._writableState.corked++; + }; + Writable.prototype.uncork = function () { + const state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing) { + clearBuffer(this, state); + } + } + }; + Writable.prototype.setDefaultEncoding = function setDefaultEncoding( + encoding, + ) { + if (typeof encoding === "string") { + encoding = StringPrototypeToLowerCase(encoding); + } + if (!Buffer2.isEncoding(encoding)) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + this._writableState.defaultEncoding = encoding; + return this; + }; + function writeOrBuffer(stream, state, chunk, encoding, callback) { + const len = state.objectMode ? 1 : chunk.length; + state.length += len; + const ret = state.length < state.highWaterMark; + if (!ret) { + state.needDrain = true; + } + if ( + state.writing || state.corked || state.errored || !state.constructed + ) { + state.buffered.push({ + chunk, + encoding, + callback, + }); + if (state.allBuffers && encoding !== "buffer") { + state.allBuffers = false; + } + if (state.allNoop && callback !== nop) { + state.allNoop = false; + } + } else { + state.writelen = len; + state.writecb = callback; + state.writing = true; + state.sync = true; + stream._write(chunk, encoding, state.onwrite); + state.sync = false; + } + return ret && !state.errored && !state.destroyed; + } + function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) { + state.onwrite(new ERR_STREAM_DESTROYED("write")); + } else if (writev) { + stream._writev(chunk, state.onwrite); + } else { + stream._write(chunk, encoding, 
state.onwrite); + } + state.sync = false; + } + function onwriteError(stream, state, er, cb) { + --state.pendingcb; + cb(er); + errorBuffer(state); + errorOrDestroy(stream, er); + } + function onwrite(stream, er) { + const state = stream._writableState; + const sync = state.sync; + const cb = state.writecb; + if (typeof cb !== "function") { + errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()); + return; + } + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; + if (er) { + er.stack; + if (!state.errored) { + state.errored = er; + } + if (stream._readableState && !stream._readableState.errored) { + stream._readableState.errored = er; + } + if (sync) { + process.nextTick(onwriteError, stream, state, er, cb); + } else { + onwriteError(stream, state, er, cb); + } + } else { + if (state.buffered.length > state.bufferedIndex) { + clearBuffer(stream, state); + } + if (sync) { + if ( + state.afterWriteTickInfo !== null && + state.afterWriteTickInfo.cb === cb + ) { + state.afterWriteTickInfo.count++; + } else { + state.afterWriteTickInfo = { + count: 1, + cb, + stream, + state, + }; + process.nextTick(afterWriteTick, state.afterWriteTickInfo); + } + } else { + afterWrite(stream, state, 1, cb); + } + } + } + function afterWriteTick({ stream, state, count, cb }) { + state.afterWriteTickInfo = null; + return afterWrite(stream, state, count, cb); + } + function afterWrite(stream, state, count, cb) { + const needDrain = !state.ending && !stream.destroyed && + state.length === 0 && state.needDrain; + if (needDrain) { + state.needDrain = false; + stream.emit("drain"); + } + while (count-- > 0) { + state.pendingcb--; + cb(); + } + if (state.destroyed) { + errorBuffer(state); + } + finishMaybe(stream, state); + } + function errorBuffer(state) { + if (state.writing) { + return; + } + for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { + var _state$errored; + const { chunk, callback } = state.buffered[n]; + const len 
= state.objectMode ? 1 : chunk.length; + state.length -= len; + callback( + (_state$errored = state.errored) !== null && _state$errored !== void 0 + ? _state$errored + : new ERR_STREAM_DESTROYED("write"), + ); + } + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + var _state$errored2; + onfinishCallbacks[i]( + (_state$errored2 = state.errored) !== null && + _state$errored2 !== void 0 + ? _state$errored2 + : new ERR_STREAM_DESTROYED("end"), + ); + } + resetBuffer(state); + } + function clearBuffer(stream, state) { + if ( + state.corked || state.bufferProcessing || state.destroyed || + !state.constructed + ) { + return; + } + const { buffered, bufferedIndex, objectMode } = state; + const bufferedLength = buffered.length - bufferedIndex; + if (!bufferedLength) { + return; + } + let i = bufferedIndex; + state.bufferProcessing = true; + if (bufferedLength > 1 && stream._writev) { + state.pendingcb -= bufferedLength - 1; + const callback = state.allNoop ? nop : (err) => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err); + } + }; + const chunks = state.allNoop && i === 0 + ? buffered + : ArrayPrototypeSlice(buffered, i); + chunks.allBuffers = state.allBuffers; + doWrite(stream, state, true, state.length, chunks, "", callback); + resetBuffer(state); + } else { + do { + const { chunk, encoding, callback } = buffered[i]; + buffered[i++] = null; + const len = objectMode ? 
1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, callback); + } while (i < buffered.length && !state.writing); + if (i === buffered.length) { + resetBuffer(state); + } else if (i > 256) { + buffered.splice(0, i); + state.bufferedIndex = 0; + } else { + state.bufferedIndex = i; + } + } + state.bufferProcessing = false; + } + Writable.prototype._write = function (chunk, encoding, cb) { + if (this._writev) { + this._writev( + [ + { + chunk, + encoding, + }, + ], + cb, + ); + } else { + throw new ERR_METHOD_NOT_IMPLEMENTED("_write()"); + } + }; + Writable.prototype._writev = null; + Writable.prototype.end = function (chunk, encoding, cb) { + const state = this._writableState; + if (typeof chunk === "function") { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === "function") { + cb = encoding; + encoding = null; + } + let err; + if (chunk !== null && chunk !== void 0) { + const ret = _write(this, chunk, encoding); + if (ret instanceof Error2) { + err = ret; + } + } + if (state.corked) { + state.corked = 1; + this.uncork(); + } + if (err) { + } else if (!state.errored && !state.ending) { + state.ending = true; + finishMaybe(this, state, true); + state.ended = true; + } else if (state.finished) { + err = new ERR_STREAM_ALREADY_FINISHED("end"); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED("end"); + } + if (typeof cb === "function") { + if (err || state.finished) { + process.nextTick(cb, err); + } else { + state[kOnFinished].push(cb); + } + } + return this; + }; + function needFinish(state) { + return state.ending && !state.destroyed && state.constructed && + state.length === 0 && !state.errored && state.buffered.length === 0 && + !state.finished && !state.writing && !state.errorEmitted && + !state.closeEmitted; + } + function callFinal(stream, state) { + let called = false; + function onFinish(err) { + if (called) { + errorOrDestroy( + stream, + err !== null && err !== void 0 ? 
err : ERR_MULTIPLE_CALLBACK(), + ); + return; + } + called = true; + state.pendingcb--; + if (err) { + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](err); + } + errorOrDestroy(stream, err, state.sync); + } else if (needFinish(state)) { + state.prefinished = true; + stream.emit("prefinish"); + state.pendingcb++; + process.nextTick(finish, stream, state); + } + } + state.sync = true; + state.pendingcb++; + try { + stream._final(onFinish); + } catch (err) { + onFinish(err); + } + state.sync = false; + } + function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === "function" && !state.destroyed) { + state.finalCalled = true; + callFinal(stream, state); + } else { + state.prefinished = true; + stream.emit("prefinish"); + } + } + } + function finishMaybe(stream, state, sync) { + if (needFinish(state)) { + prefinish(stream, state); + if (state.pendingcb === 0) { + if (sync) { + state.pendingcb++; + process.nextTick( + (stream2, state2) => { + if (needFinish(state2)) { + finish(stream2, state2); + } else { + state2.pendingcb--; + } + }, + stream, + state, + ); + } else if (needFinish(state)) { + state.pendingcb++; + finish(stream, state); + } + } + } + } + function finish(stream, state) { + state.pendingcb--; + state.finished = true; + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](); + } + stream.emit("finish"); + if (state.autoDestroy) { + const rState = stream._readableState; + const autoDestroy = !rState || rState.autoDestroy && // We don't expect the readable to ever 'end' + // if readable is explicitly set to false. + (rState.endEmitted || rState.readable === false); + if (autoDestroy) { + stream.destroy(); + } + } + } + ObjectDefineProperties(Writable.prototype, { + closed: { + __proto__: null, + get() { + return this._writableState ? 
this._writableState.closed : false; + }, + }, + destroyed: { + __proto__: null, + get() { + return this._writableState ? this._writableState.destroyed : false; + }, + set(value) { + if (this._writableState) { + this._writableState.destroyed = value; + } + }, + }, + writable: { + __proto__: null, + get() { + const w = this._writableState; + return !!w && w.writable !== false && !w.destroyed && !w.errored && + !w.ending && !w.ended; + }, + set(val) { + if (this._writableState) { + this._writableState.writable = !!val; + } + }, + }, + writableFinished: { + __proto__: null, + get() { + return this._writableState ? this._writableState.finished : false; + }, + }, + writableObjectMode: { + __proto__: null, + get() { + return this._writableState ? this._writableState.objectMode : false; + }, + }, + writableBuffer: { + __proto__: null, + get() { + return this._writableState && this._writableState.getBuffer(); + }, + }, + writableEnded: { + __proto__: null, + get() { + return this._writableState ? this._writableState.ending : false; + }, + }, + writableNeedDrain: { + __proto__: null, + get() { + const wState = this._writableState; + if (!wState) { + return false; + } + return !wState.destroyed && !wState.ending && wState.needDrain; + }, + }, + writableHighWaterMark: { + __proto__: null, + get() { + return this._writableState && this._writableState.highWaterMark; + }, + }, + writableCorked: { + __proto__: null, + get() { + return this._writableState ? this._writableState.corked : 0; + }, + }, + writableLength: { + __proto__: null, + get() { + return this._writableState && this._writableState.length; + }, + }, + errored: { + __proto__: null, + enumerable: false, + get() { + return this._writableState ? 
this._writableState.errored : null; + }, + }, + writableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!(this._writableState.writable !== false && + (this._writableState.destroyed || this._writableState.errored) && + !this._writableState.finished); + }, + }, + }); + var destroy = destroyImpl.destroy; + Writable.prototype.destroy = function (err, cb) { + const state = this._writableState; + if ( + !state.destroyed && + (state.bufferedIndex < state.buffered.length || + state[kOnFinished].length) + ) { + process.nextTick(errorBuffer, state); + } + destroy.call(this, err, cb); + return this; + }; + Writable.prototype._undestroy = destroyImpl.undestroy; + Writable.prototype._destroy = function (err, cb) { + cb(err); + }; + Writable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); + }; + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Writable.fromWeb = function (writableStream, options) { + return lazyWebStreams().newStreamWritableFromWritableStream( + writableStream, + options, + ); + }; + Writable.toWeb = function (streamWritable) { + return lazyWebStreams().newWritableStreamFromStreamWritable( + streamWritable, + ); + }; + }, +}); + +// lib/internal/streams/duplexify.js +var require_duplexify = __commonJS({ + "lib/internal/streams/duplexify.js"(exports, module) { + var process = require_browser2(); + var bufferModule = require_buffer(); + var { + isReadable, + isWritable, + isIterable, + isNodeStream, + isReadableNodeStream, + isWritableNodeStream, + isDuplexNodeStream, + } = require_utils(); + var eos = require_end_of_stream(); + var { destroyer } = require_destroy(); + var Duplex = require_duplex(); + var Readable = require_readable(); + var from = require_from(); + var isBlob = typeof Blob !== "undefined" + ? 
function isBlob2(b) { + return b instanceof Blob; + } + : function isBlob2(b) { + return false; + }; + var { FunctionPrototypeCall } = require_primordials(); + var Duplexify = class extends Duplex { + constructor(options) { + super(options); + if ( + (options === null || options === void 0 + ? void 0 + : options.readable) === false + ) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + if ( + (options === null || options === void 0 + ? void 0 + : options.writable) === false + ) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } + }; + module.exports = function duplexify(body, name) { + if (isDuplexNodeStream(body)) { + return body; + } + if (isReadableNodeStream(body)) { + return _duplexify({ + readable: body, + }); + } + if (isWritableNodeStream(body)) { + return _duplexify({ + writable: body, + }); + } + if (isNodeStream(body)) { + return _duplexify({ + writable: false, + readable: false, + }); + } + if (typeof body === "function") { + const { value, write, final, destroy } = fromAsyncGen(body); + if (isIterable(value)) { + return from(Duplexify, value, { + // TODO (ronag): highWaterMark? + objectMode: true, + write, + final, + destroy, + }); + } + const then2 = value === null || value === void 0 ? void 0 : value.then; + if (typeof then2 === "function") { + let d; + const promise = FunctionPrototypeCall( + then2, + value, + (val) => { + if (val != null) { + throw new ERR_INVALID_RETURN_VALUE("nully", "body", val); + } + }, + (err) => { + destroyer(d, err); + }, + ); + return d = new Duplexify({ + // TODO (ronag): highWaterMark? 
+ objectMode: true, + readable: false, + write, + final(cb) { + final(async () => { + try { + await promise; + process.nextTick(cb, null); + } catch (err) { + process.nextTick(cb, err); + } + }); + }, + destroy, + }); + } + throw new ERR_INVALID_RETURN_VALUE( + "Iterable, AsyncIterable or AsyncFunction", + name, + value, + ); + } + if (isBlob(body)) { + return duplexify(body.arrayBuffer()); + } + if (isIterable(body)) { + return from(Duplexify, body, { + // TODO (ronag): highWaterMark? + objectMode: true, + writable: false, + }); + } + if ( + typeof (body === null || body === void 0 ? void 0 : body.writable) === + "object" || + typeof (body === null || body === void 0 ? void 0 : body.readable) === + "object" + ) { + const readable = body !== null && body !== void 0 && body.readable + ? isReadableNodeStream( + body === null || body === void 0 ? void 0 : body.readable, + ) + ? body === null || body === void 0 ? void 0 : body.readable + : duplexify(body.readable) + : void 0; + const writable = body !== null && body !== void 0 && body.writable + ? isWritableNodeStream( + body === null || body === void 0 ? void 0 : body.writable, + ) + ? body === null || body === void 0 ? void 0 : body.writable + : duplexify(body.writable) + : void 0; + return _duplexify({ + readable, + writable, + }); + } + const then = body === null || body === void 0 ? 
void 0 : body.then; + if (typeof then === "function") { + let d; + FunctionPrototypeCall( + then, + body, + (val) => { + if (val != null) { + d.push(val); + } + d.push(null); + }, + (err) => { + destroyer(d, err); + }, + ); + return d = new Duplexify({ + objectMode: true, + writable: false, + read() { + }, + }); + } + throw new ERR_INVALID_ARG_TYPE( + name, + [ + "Blob", + "ReadableStream", + "WritableStream", + "Stream", + "Iterable", + "AsyncIterable", + "Function", + "{ readable, writable } pair", + "Promise", + ], + body, + ); + }; + function fromAsyncGen(fn) { + let { promise, resolve } = createDeferredPromise(); + const ac = new AbortController(); + const signal = ac.signal; + const value = fn( + async function* () { + while (true) { + const _promise = promise; + promise = null; + const { chunk, done, cb } = await _promise; + process.nextTick(cb); + if (done) { + return; + } + if (signal.aborted) { + throw new AbortError(void 0, { + cause: signal.reason, + }); + } + ({ promise, resolve } = createDeferredPromise()); + yield chunk; + } + }(), + { + signal, + }, + ); + return { + value, + write(chunk, encoding, cb) { + const _resolve = resolve; + resolve = null; + _resolve({ + chunk, + done: false, + cb, + }); + }, + final(cb) { + const _resolve = resolve; + resolve = null; + _resolve({ + done: true, + cb, + }); + }, + destroy(err, cb) { + ac.abort(); + cb(err); + }, + }; + } + function _duplexify(pair) { + const r = pair.readable && typeof pair.readable.read !== "function" + ? Readable.wrap(pair.readable) + : pair.readable; + const w = pair.writable; + let readable = !!isReadable(r); + let writable = !!isWritable(w); + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + function onfinished(err) { + const cb = onclose; + onclose = null; + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + d = new Duplexify({ + // TODO (ronag): highWaterMark? 
+ readableObjectMode: + !!(r !== null && r !== void 0 && r.readableObjectMode), + writableObjectMode: + !!(w !== null && w !== void 0 && w.writableObjectMode), + readable, + writable, + }); + if (writable) { + eos(w, (err) => { + writable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + d._write = function (chunk, encoding, callback) { + if (w.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + d._final = function (callback) { + w.end(); + onfinish = callback; + }; + w.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + w.on("finish", function () { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + if (readable) { + eos(r, (err) => { + readable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + r.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + r.on("end", function () { + d.push(null); + }); + d._read = function () { + while (true) { + const buf = r.read(); + if (buf === null) { + onreadable = d._read; + return; + } + if (!d.push(buf)) { + return; + } + } + }; + } + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + onreadable = null; + ondrain = null; + onfinish = null; + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(w, err); + destroyer(r, err); + } + }; + return d; + } + }, +}); + +// lib/internal/streams/duplex.js +var require_duplex = __commonJS({ + "lib/internal/streams/duplex.js"(exports, module) { + "use strict"; + var { + ObjectDefineProperties, + ObjectGetOwnPropertyDescriptor, + ObjectKeys, + ObjectSetPrototypeOf, + } = require_primordials(); + module.exports = Duplex; + var Readable = require_readable(); + var Writable = require_writable(); + ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); + ObjectSetPrototypeOf(Duplex, 
Readable); + { + const keys = ObjectKeys(Writable.prototype); + for (let i = 0; i < keys.length; i++) { + const method = keys[i]; + if (!Duplex.prototype[method]) { + Duplex.prototype[method] = Writable.prototype[method]; + } + } + } + function Duplex(options) { + if (!(this instanceof Duplex)) { + return new Duplex(options); + } + Readable.call(this, options); + Writable.call(this, options); + if (options) { + this.allowHalfOpen = options.allowHalfOpen !== false; + if (options.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + if (options.writable === false) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } else { + this.allowHalfOpen = true; + } + } + ObjectDefineProperties(Duplex.prototype, { + writable: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writable"), + }, + writableHighWaterMark: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableHighWaterMark", + ), + }, + writableObjectMode: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableObjectMode", + ), + }, + writableBuffer: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableBuffer"), + }, + writableLength: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableLength"), + }, + writableFinished: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + Writable.prototype, + "writableFinished", + ), + }, + writableCorked: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableCorked"), + }, + writableEnded: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableEnded"), + }, + writableNeedDrain: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor( + 
Writable.prototype, + "writableNeedDrain", + ), + }, + destroyed: { + __proto__: null, + get() { + if ( + this._readableState === void 0 || this._writableState === void 0 + ) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + if (this._readableState && this._writableState) { + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } + }, + }, + }); + var webStreamsAdapters; + function lazyWebStreams() { + if (webStreamsAdapters === void 0) { + webStreamsAdapters = {}; + } + return webStreamsAdapters; + } + Duplex.fromWeb = function (pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair( + pair, + options, + ); + }; + Duplex.toWeb = function (duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); + }; + var duplexify; + Duplex.from = function (body) { + if (!duplexify) { + duplexify = require_duplexify(); + } + return duplexify(body, "body"); + }; + }, +}); + +// lib/internal/streams/transform.js +var require_transform = __commonJS({ + "lib/internal/streams/transform.js"(exports, module) { + "use strict"; + var { ObjectSetPrototypeOf, Symbol: Symbol2 } = require_primordials(); + module.exports = Transform; + var Duplex = require_duplex(); + var { getHighWaterMark } = require_state(); + ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); + ObjectSetPrototypeOf(Transform, Duplex); + var kCallback = Symbol2("kCallback"); + function Transform(options) { + if (!(this instanceof Transform)) { + return new Transform(options); + } + const readableHighWaterMark = options + ? getHighWaterMark(this, options, "readableHighWaterMark", true) + : null; + if (readableHighWaterMark === 0) { + options = { + ...options, + highWaterMark: null, + readableHighWaterMark, + // TODO (ronag): 0 is not optimal since we have + // a "bug" where we check needDrain before calling _write and not after. 
+ // Refs: https://github.com/nodejs/node/pull/32887 + // Refs: https://github.com/nodejs/node/pull/35941 + writableHighWaterMark: options.writableHighWaterMark || 0, + }; + } + Duplex.call(this, options); + this._readableState.sync = false; + this[kCallback] = null; + if (options) { + if (typeof options.transform === "function") { + this._transform = options.transform; + } + if (typeof options.flush === "function") { + this._flush = options.flush; + } + } + this.on("prefinish", prefinish); + } + function final(cb) { + if (typeof this._flush === "function" && !this.destroyed) { + this._flush((er, data) => { + if (er) { + if (cb) { + cb(er); + } else { + this.destroy(er); + } + return; + } + if (data != null) { + this.push(data); + } + this.push(null); + if (cb) { + cb(); + } + }); + } else { + this.push(null); + if (cb) { + cb(); + } + } + } + function prefinish() { + if (this._final !== final) { + final.call(this); + } + } + Transform.prototype._final = final; + Transform.prototype._transform = function (chunk, encoding, callback) { + throw new ERR_METHOD_NOT_IMPLEMENTED("_transform()"); + }; + Transform.prototype._write = function (chunk, encoding, callback) { + const rState = this._readableState; + const wState = this._writableState; + const length = rState.length; + this._transform(chunk, encoding, (err, val) => { + if (err) { + callback(err); + return; + } + if (val != null) { + this.push(val); + } + if ( + wState.ended || // Backwards compat. + length === rState.length || // Backwards compat. 
+ rState.length < rState.highWaterMark + ) { + callback(); + } else { + this[kCallback] = callback; + } + }); + }; + Transform.prototype._read = function () { + if (this[kCallback]) { + const callback = this[kCallback]; + this[kCallback] = null; + callback(); + } + }; + }, +}); + +// lib/internal/streams/passthrough.js +var require_passthrough = __commonJS({ + "lib/internal/streams/passthrough.js"(exports, module) { + "use strict"; + var { ObjectSetPrototypeOf } = require_primordials(); + module.exports = PassThrough; + var Transform = require_transform(); + ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); + ObjectSetPrototypeOf(PassThrough, Transform); + function PassThrough(options) { + if (!(this instanceof PassThrough)) { + return new PassThrough(options); + } + Transform.call(this, options); + } + PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); + }; + }, +}); + +// lib/internal/streams/pipeline.js +var require_pipeline = __commonJS({ + "lib/internal/streams/pipeline.js"(exports, module) { + var process = require_browser2(); + var { ArrayIsArray, Promise: Promise2, SymbolAsyncIterator } = + require_primordials(); + var eos = require_end_of_stream(); + var destroyImpl = require_destroy(); + var Duplex = require_duplex(); + var { validateFunction, validateAbortSignal } = require_validators(); + var { isIterable, isReadable, isReadableNodeStream, isNodeStream } = + require_utils(); + var PassThrough; + var Readable; + function destroyer(stream, reading, writing) { + let finished = false; + stream.on("close", () => { + finished = true; + }); + const cleanup = eos( + stream, + { + readable: reading, + writable: writing, + }, + (err) => { + finished = !err; + }, + ); + return { + destroy: (err) => { + if (finished) { + return; + } + finished = true; + destroyImpl.destroyer( + stream, + err || new ERR_STREAM_DESTROYED("pipe"), + ); + }, + cleanup, + }; + } + function popCallback(streams) { + validateFunction( + 
streams[streams.length - 1], + "streams[stream.length - 1]", + ); + return streams.pop(); + } + function makeAsyncIterable(val) { + if (isIterable(val)) { + return val; + } else if (isReadableNodeStream(val)) { + return fromReadable(val); + } + throw new ERR_INVALID_ARG_TYPE("val", [ + "Readable", + "Iterable", + "AsyncIterable", + ], val); + } + async function* fromReadable(val) { + if (!Readable) { + Readable = require_readable(); + } + yield* Readable.prototype[SymbolAsyncIterator].call(val); + } + async function pump(iterable, writable, finish, { end }) { + let error; + let onresolve = null; + const resume = (err) => { + if (err) { + error = err; + } + if (onresolve) { + const callback = onresolve; + onresolve = null; + callback(); + } + }; + const wait = () => + new Promise2((resolve, reject) => { + if (error) { + reject(error); + } else { + onresolve = () => { + if (error) { + reject(error); + } else { + resolve(); + } + }; + } + }); + writable.on("drain", resume); + const cleanup = eos( + writable, + { + readable: false, + }, + resume, + ); + try { + if (writable.writableNeedDrain) { + await wait(); + } + for await (const chunk of iterable) { + if (!writable.write(chunk)) { + await wait(); + } + } + if (end) { + writable.end(); + } + await wait(); + finish(); + } catch (err) { + finish(error !== err ? aggregateTwoErrors(error, err) : err); + } finally { + cleanup(); + writable.off("drain", resume); + } + } + function pipeline(...streams) { + return pipelineImpl(streams, once(popCallback(streams))); + } + function pipelineImpl(streams, callback, opts) { + if (streams.length === 1 && ArrayIsArray(streams[0])) { + streams = streams[0]; + } + if (streams.length < 2) { + throw new ERR_MISSING_ARGS("streams"); + } + const ac = new AbortController(); + const signal = ac.signal; + const outerSignal = opts === null || opts === void 0 + ? 
void 0 + : opts.signal; + const lastStreamCleanup = []; + validateAbortSignal(outerSignal, "options.signal"); + function abort() { + finishImpl(new AbortError()); + } + outerSignal === null || outerSignal === void 0 + ? void 0 + : outerSignal.addEventListener("abort", abort); + let error; + let value; + const destroys = []; + let finishCount = 0; + function finish(err) { + finishImpl(err, --finishCount === 0); + } + function finishImpl(err, final) { + if (err && (!error || error.code === "ERR_STREAM_PREMATURE_CLOSE")) { + error = err; + } + if (!error && !final) { + return; + } + while (destroys.length) { + destroys.shift()(error); + } + outerSignal === null || outerSignal === void 0 + ? void 0 + : outerSignal.removeEventListener("abort", abort); + ac.abort(); + if (final) { + if (!error) { + lastStreamCleanup.forEach((fn) => fn()); + } + process.nextTick(callback, error, value); + } + } + let ret; + for (let i = 0; i < streams.length; i++) { + const stream = streams[i]; + const reading = i < streams.length - 1; + const writing = i > 0; + const end = reading || + (opts === null || opts === void 0 ? 
void 0 : opts.end) !== false; + const isLastStream = i === streams.length - 1; + if (isNodeStream(stream)) { + let onError2 = function (err) { + if ( + err && err.name !== "AbortError" && + err.code !== "ERR_STREAM_PREMATURE_CLOSE" + ) { + finish(err); + } + }; + var onError = onError2; + if (end) { + const { destroy, cleanup } = destroyer(stream, reading, writing); + destroys.push(destroy); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + stream.on("error", onError2); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(() => { + stream.removeListener("error", onError2); + }); + } + } + if (i === 0) { + if (typeof stream === "function") { + ret = stream({ + signal, + }); + if (!isIterable(ret)) { + throw new ERR_INVALID_RETURN_VALUE( + "Iterable, AsyncIterable or Stream", + "source", + ret, + ); + } + } else if (isIterable(stream) || isReadableNodeStream(stream)) { + ret = stream; + } else { + ret = Duplex.from(stream); + } + } else if (typeof stream === "function") { + ret = makeAsyncIterable(ret); + ret = stream(ret, { + signal, + }); + if (reading) { + if (!isIterable(ret, true)) { + throw new ERR_INVALID_RETURN_VALUE( + "AsyncIterable", + `transform[${i - 1}]`, + ret, + ); + } + } else { + var _ret; + if (!PassThrough) { + PassThrough = require_passthrough(); + } + const pt = new PassThrough({ + objectMode: true, + }); + const then = (_ret = ret) === null || _ret === void 0 + ? 
void 0 + : _ret.then; + if (typeof then === "function") { + finishCount++; + then.call( + ret, + (val) => { + value = val; + if (val != null) { + pt.write(val); + } + if (end) { + pt.end(); + } + process.nextTick(finish); + }, + (err) => { + pt.destroy(err); + process.nextTick(finish, err); + }, + ); + } else if (isIterable(ret, true)) { + finishCount++; + pump(ret, pt, finish, { + end, + }); + } else { + throw new ERR_INVALID_RETURN_VALUE( + "AsyncIterable or Promise", + "destination", + ret, + ); + } + ret = pt; + const { destroy, cleanup } = destroyer(ret, false, true); + destroys.push(destroy); + if (isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + } else if (isNodeStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount += 2; + const cleanup = pipe(ret, stream, finish, { + end, + }); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } else if (isIterable(ret)) { + finishCount++; + pump(ret, stream, finish, { + end, + }); + } else { + throw new ERR_INVALID_ARG_TYPE("val", [ + "Readable", + "Iterable", + "AsyncIterable", + ], ret); + } + ret = stream; + } else { + ret = Duplex.from(stream); + } + } + if ( + signal !== null && signal !== void 0 && signal.aborted || + outerSignal !== null && outerSignal !== void 0 && outerSignal.aborted + ) { + process.nextTick(abort); + } + return ret; + } + function pipe(src, dst, finish, { end }) { + let ended = false; + dst.on("close", () => { + if (!ended) { + finish(new ERR_STREAM_PREMATURE_CLOSE()); + } + }); + src.pipe(dst, { + end, + }); + if (end) { + src.once("end", () => { + ended = true; + dst.end(); + }); + } else { + finish(); + } + eos( + src, + { + readable: true, + writable: false, + }, + (err) => { + const rState = src._readableState; + if ( + err && err.code === "ERR_STREAM_PREMATURE_CLOSE" && rState && + rState.ended && !rState.errored && !rState.errorEmitted + ) { + src.once("end", finish).once("error", finish); + } else { + finish(err); + } + }, + ); + 
return eos( + dst, + { + readable: false, + writable: true, + }, + finish, + ); + } + module.exports = { + pipelineImpl, + pipeline, + }; + }, +}); + +// lib/internal/streams/compose.js +var require_compose = __commonJS({ + "lib/internal/streams/compose.js"(exports, module) { + "use strict"; + var { pipeline } = require_pipeline(); + var Duplex = require_duplex(); + var { destroyer } = require_destroy(); + var { isNodeStream, isReadable, isWritable } = require_utils(); + module.exports = function compose(...streams) { + if (streams.length === 0) { + throw new ERR_MISSING_ARGS("streams"); + } + if (streams.length === 1) { + return Duplex.from(streams[0]); + } + const orgStreams = [...streams]; + if (typeof streams[0] === "function") { + streams[0] = Duplex.from(streams[0]); + } + if (typeof streams[streams.length - 1] === "function") { + const idx = streams.length - 1; + streams[idx] = Duplex.from(streams[idx]); + } + for (let n = 0; n < streams.length; ++n) { + if (!isNodeStream(streams[n])) { + continue; + } + if (n < streams.length - 1 && !isReadable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + "must be readable", + ); + } + if (n > 0 && !isWritable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + "must be writable", + ); + } + } + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + function onfinished(err) { + const cb = onclose; + onclose = null; + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + const head = streams[0]; + const tail = pipeline(streams, onfinished); + const writable = !!isWritable(head); + const readable = !!isReadable(tail); + d = new Duplex({ + // TODO (ronag): highWaterMark? 
+ writableObjectMode: + !!(head !== null && head !== void 0 && head.writableObjectMode), + readableObjectMode: + !!(tail !== null && tail !== void 0 && tail.writableObjectMode), + writable, + readable, + }); + if (writable) { + d._write = function (chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + d._final = function (callback) { + head.end(); + onfinish = callback; + }; + head.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + tail.on("finish", function () { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + if (readable) { + tail.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + tail.on("end", function () { + d.push(null); + }); + d._read = function () { + while (true) { + const buf = tail.read(); + if (buf === null) { + onreadable = d._read; + return; + } + if (!d.push(buf)) { + return; + } + } + }; + } + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + onreadable = null; + ondrain = null; + onfinish = null; + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(tail, err); + } + }; + return d; + }; + }, +}); + +// lib/stream/promises.js +var require_promises = __commonJS({ + "lib/stream/promises.js"(exports, module) { + "use strict"; + var { ArrayPrototypePop, Promise: Promise2 } = require_primordials(); + var { isIterable, isNodeStream } = require_utils(); + var { pipelineImpl: pl } = require_pipeline(); + var { finished } = require_end_of_stream(); + function pipeline(...streams) { + return new Promise2((resolve, reject) => { + let signal; + let end; + const lastArg = streams[streams.length - 1]; + if ( + lastArg && typeof lastArg === "object" && !isNodeStream(lastArg) && + !isIterable(lastArg) + ) { + const options = ArrayPrototypePop(streams); + 
signal = options.signal; + end = options.end; + } + pl( + streams, + (err, value) => { + if (err) { + reject(err); + } else { + resolve(value); + } + }, + { + signal, + end, + }, + ); + }); + } + module.exports = { + finished, + pipeline, + }; + }, +}); + +// lib/stream.js +var require_stream = __commonJS({ + "lib/stream.js"(exports, module) { + var { Buffer: Buffer2 } = require_buffer(); + var { ObjectDefineProperty, ObjectKeys, ReflectApply } = + require_primordials(); + var { streamReturningOperators, promiseReturningOperators } = + require_operators(); + var compose = require_compose(); + var { pipeline } = require_pipeline(); + var { destroyer } = require_destroy(); + var eos = require_end_of_stream(); + var promises = require_promises(); + var utils = require_utils(); + var Stream = module.exports = require_legacy().Stream; + Stream.isDisturbed = utils.isDisturbed; + Stream.isErrored = utils.isErrored; + Stream.isReadable = utils.isReadable; + Stream.Readable = require_readable(); + for (const key of ObjectKeys(streamReturningOperators)) { + let fn2 = function (...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(ReflectApply(op, this, args)); + }; + fn = fn2; + const op = streamReturningOperators[key]; + ObjectDefineProperty(fn2, "name", { + __proto__: null, + value: op.name, + }); + ObjectDefineProperty(fn2, "length", { + __proto__: null, + value: op.length, + }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn2, + enumerable: false, + configurable: true, + writable: true, + }); + } + var fn; + for (const key of ObjectKeys(promiseReturningOperators)) { + let fn2 = function (...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return ReflectApply(op, this, args); + }; + fn = fn2; + const op = promiseReturningOperators[key]; + ObjectDefineProperty(fn2, "name", { + __proto__: null, + value: op.name, + }); + ObjectDefineProperty(fn2, "length", { + 
__proto__: null, + value: op.length, + }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn2, + enumerable: false, + configurable: true, + writable: true, + }); + } + var fn; + Stream.Writable = require_writable(); + Stream.Duplex = require_duplex(); + Stream.Transform = require_transform(); + Stream.PassThrough = require_passthrough(); + Stream.pipeline = pipeline; + var { addAbortSignal } = require_add_abort_signal(); + Stream.addAbortSignal = addAbortSignal; + Stream.finished = eos; + Stream.destroy = destroyer; + Stream.compose = compose; + ObjectDefineProperty(Stream, "promises", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return promises; + }, + }); + ObjectDefineProperty(pipeline, promisify, { + __proto__: null, + enumerable: true, + get() { + return promises.pipeline; + }, + }); + ObjectDefineProperty(eos, promisify, { + __proto__: null, + enumerable: true, + get() { + return promises.finished; + }, + }); + Stream.Stream = Stream; + Stream._isUint8Array = function isUint8Array(value) { + return value instanceof Uint8Array; + }; + Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return Buffer2.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + }; + }, +}); /* End esm.sh bundle */ // The following code implements Readable.fromWeb(), Writable.fromWeb(), and @@ -16,12 +5713,6 @@ const __process$ = { nextTick };import __buffer$ from "ext:deno_node/buffer.ts"; // readable-stream module yet. 
This can be removed once the following upstream // issue is resolved: https://github.com/nodejs/readable-stream/issues/482 -import { - AbortError, - ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE, - ERR_STREAM_PREMATURE_CLOSE, -} from "ext:deno_node/internal/errors.ts"; import { destroy } from "ext:deno_node/internal/streams/destroy.mjs"; import finished from "ext:deno_node/internal/streams/end-of-stream.mjs"; import { @@ -31,23 +5722,36 @@ import { isWritable, isWritableEnded, } from "ext:deno_node/internal/streams/utils.mjs"; -import { createDeferredPromise, kEmptyObject } from "ext:deno_node/internal/util.mjs"; -import { validateBoolean, validateObject } from "ext:deno_node/internal/validators.mjs"; +import { ReadableStream, WritableStream } from "ext:deno_node/stream/web.ts"; +import { + validateBoolean, + validateObject, +} from "ext:deno_node/internal/validators.mjs"; +const CustomStream = require_stream(); const process = __process$; const { Buffer } = __buffer$; -const Readable = Au; -const Writable = mu; -const Duplex = Tu; -function isReadableStream(object) { - return object instanceof ReadableStream; -} +export const Readable = CustomStream.Readable; +export const Writable = CustomStream.Writable; +export const Duplex = CustomStream.Duplex; +export const PassThrough = CustomStream.PassThrough; +export const Stream = CustomStream.Stream; +export const Transform = CustomStream.Transform; +export const _isUint8Array = CustomStream._isUint8Array; +export const _uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer; +export const addAbortSignal = CustomStream.addAbortSignal; +export const pipeline = CustomStream.pipeline; +export { finished }; function isWritableStream(object) { return object instanceof WritableStream; } +function isReadableStream(object) { + return object instanceof ReadableStream; +} + Readable.fromWeb = function ( readableStream, options = kEmptyObject, diff --git a/ext/node/polyfills/internal/errors.ts 
b/ext/node/polyfills/internal/errors.ts index 1894d8e24a..44aba4d161 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -13,7 +13,7 @@ * ERR_INVALID_PACKAGE_CONFIG // package.json stuff, probably useless */ -import { inspect } from "ext:deno_node/internal/util/inspect.mjs"; +import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; import { codeMap, @@ -2065,7 +2065,7 @@ export class ERR_UNKNOWN_CREDENTIAL extends NodeError { } export class ERR_UNKNOWN_ENCODING extends NodeTypeError { constructor(x: string) { - super("ERR_UNKNOWN_ENCODING", `Unknown encoding: ${x}`); + super("ERR_UNKNOWN_ENCODING", format("Unknown encoding: %s", x)); } } export class ERR_UNKNOWN_FILE_EXTENSION extends NodeTypeError { diff --git a/ext/node/polyfills/stream/promises.mjs b/ext/node/polyfills/stream/promises.mjs index 69ba7fd0c1..98fe38e0a4 100644 --- a/ext/node/polyfills/stream/promises.mjs +++ b/ext/node/polyfills/stream/promises.mjs @@ -1,9 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. -import stream from "ext:deno_node/_stream.mjs"; - -const { finished, pipeline } = stream.promises; +import { finished, pipeline } from "ext:deno_node/_stream.mjs"; export default { finished, From 234cef982c12a6c46aa8ba3787920f7b9a856be3 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Wed, 10 May 2023 13:23:14 +0200 Subject: [PATCH 142/320] feat(ext/http): Automatic compression for Deno.serve (#19031) `Content-Encoding: gzip` support for `Deno.serve`. This doesn't support Brotli (`br`) yet, however it should not be difficult to add. Heuristics for compression are modelled after those in `Deno.serveHttp`. Tests are provided to ensure that the gzip compression is correct. 
We chunk a number of different streams (zeros, hard-to-compress data, already-gzipped data) in a number of different ways (regular, random, large/small, small/large). --- Cargo.lock | 1 + cli/tests/unit/serve_test.ts | 216 ++++++++----- ext/http/Cargo.toml | 1 + ext/http/http_next.rs | 178 ++++++++-- ext/http/response_body.rs | 607 +++++++++++++++++++++++++++++++---- 5 files changed, 847 insertions(+), 156 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 01cadaa171..4fb2686277 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1046,6 +1046,7 @@ dependencies = [ "percent-encoding", "phf", "pin-project", + "rand", "ring", "serde", "slab", diff --git a/cli/tests/unit/serve_test.ts b/cli/tests/unit/serve_test.ts index ce7267f580..2bd2314b73 100644 --- a/cli/tests/unit/serve_test.ts +++ b/cli/tests/unit/serve_test.ts @@ -1425,41 +1425,6 @@ Deno.test( }, ); -Deno.test( - { permissions: { net: true, write: true, read: true } }, - async function httpServerCorrectSizeResponse() { - const promise = deferred(); - const listeningPromise = deferred(); - const ac = new AbortController(); - - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - await file.write(new Uint8Array(70 * 1024).fill(1)); // 70kb sent in 64kb + 6kb chunks - file.close(); - - const server = Deno.serve({ - handler: async (request) => { - const f = await Deno.open(tmpFile, { read: true }); - promise.resolve(); - return new Response(f.readable); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); - - await listeningPromise; - const resp = await fetch("http://127.0.0.1:4503/"); - await promise; - const body = await resp.arrayBuffer(); - - assertEquals(body.byteLength, 70 * 1024); - ac.abort(); - await server; - }, -); - // https://github.com/denoland/deno/issues/12741 // https://github.com/denoland/deno/pull/12746 // https://github.com/denoland/deno/pull/12798 @@ -2012,38 +1977,146 
@@ Deno.test( }, ); -Deno.test( - { permissions: { net: true, write: true, read: true } }, - async function httpServerSendFile() { - const promise = deferred(); - const ac = new AbortController(); - const listeningPromise = deferred(); - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - const data = new Uint8Array(70 * 1024).fill(1); - await file.write(data); - file.close(); - const server = Deno.serve({ - handler: async () => { - const f = await Deno.open(tmpFile, { read: true }); - promise.resolve(); - return new Response(f.readable, { status: 200 }); - }, - port: 4503, - signal: ac.signal, - onListen: onListen(listeningPromise), - onError: createOnErrorCb(ac), - }); +function makeTempData(size: number) { + return new Uint8Array(size).fill(1); +} - await listeningPromise; - const response = await fetch(`http://localhost:4503/`); - assertEquals(response.status, 200); - await promise; - assertEquals(new Uint8Array(await response.arrayBuffer()), data); - ac.abort(); - await server; +async function makeTempFile(size: number) { + const tmpFile = await Deno.makeTempFile(); + const file = await Deno.open(tmpFile, { write: true, read: true }); + const data = makeTempData(size); + await file.write(data); + file.close(); + + return await Deno.open(tmpFile, { write: true, read: true }); +} + +const compressionTestCases = [ + { name: "Empty", length: 0, in: {}, out: {}, expect: null }, + { + name: "EmptyAcceptGzip", + length: 0, + in: { "Accept-Encoding": "gzip" }, + out: {}, + expect: null, }, -); + // This technically would be compressible if not for the size, however the size_hint is not implemented + // for FileResource and we don't currently peek ahead on resources. 
+ // { + // name: "EmptyAcceptGzip2", + // length: 0, + // in: { "Accept-Encoding": "gzip" }, + // out: { "Content-Type": "text/plain" }, + // expect: null, + // }, + { name: "Uncompressible", length: 1024, in: {}, out: {}, expect: null }, + { + name: "UncompressibleAcceptGzip", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: {}, + expect: null, + }, + { + name: "UncompressibleType", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/fake" }, + expect: null, + }, + { + name: "CompressibleType", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain" }, + expect: "gzip", + }, + { + name: "CompressibleType2", + length: 1024, + in: { "Accept-Encoding": "gzip, deflate, br" }, + out: { "Content-Type": "text/plain" }, + expect: "gzip", + }, + { + name: "UncompressibleRange", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Content-Range": "1" }, + expect: null, + }, + { + name: "UncompressibleCE", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Content-Encoding": "random" }, + expect: null, + }, + { + name: "UncompressibleCC", + length: 1024, + in: { "Accept-Encoding": "gzip" }, + out: { "Content-Type": "text/plain", "Cache-Control": "no-transform" }, + expect: null, + }, +]; + +for (const testCase of compressionTestCases) { + const name = `httpServerCompression${testCase.name}`; + Deno.test( + { permissions: { net: true, write: true, read: true } }, + { + [name]: async function () { + const promise = deferred(); + const ac = new AbortController(); + const listeningPromise = deferred(); + const server = Deno.serve({ + handler: async (request) => { + const f = await makeTempFile(testCase.length); + promise.resolve(); + const headers = testCase.out as any; + headers["Content-Length"] = testCase.length.toString(); + return new Response(f.readable, { + headers: headers as HeadersInit, + }); + }, + port: 4503, 
+ signal: ac.signal, + onListen: onListen(listeningPromise), + onError: createOnErrorCb(ac), + }); + try { + await listeningPromise; + const resp = await fetch("http://127.0.0.1:4503/", { + headers: testCase.in as HeadersInit, + }); + await promise; + const body = await resp.arrayBuffer(); + if (testCase.expect == null) { + assertEquals(body.byteLength, testCase.length); + assertEquals( + resp.headers.get("content-length"), + testCase.length.toString(), + ); + assertEquals( + resp.headers.get("content-encoding"), + testCase.out["Content-Encoding"] || null, + ); + } else if (testCase.expect == "gzip") { + // Note the fetch will transparently decompress this response, BUT we can detect that a response + // was compressed by the lack of a content length. + assertEquals(body.byteLength, testCase.length); + assertEquals(resp.headers.get("content-encoding"), null); + assertEquals(resp.headers.get("content-length"), null); + } + } finally { + ac.abort(); + await server; + } + }, + }[name], + ); +} Deno.test( { permissions: { net: true, write: true, read: true } }, @@ -2052,15 +2125,12 @@ Deno.test( const ac = new AbortController(); const listeningPromise = deferred(); - const tmpFile = await Deno.makeTempFile(); - const file = await Deno.open(tmpFile, { write: true, read: true }); - const data = new Uint8Array(70 * 1024).fill(1); - await file.write(data); - file.close(); - const server = Deno.serve({ handler: async (request) => { - assertEquals(new Uint8Array(await request.arrayBuffer()), data); + assertEquals( + new Uint8Array(await request.arrayBuffer()), + makeTempData(70 * 1024), + ); promise.resolve(); return new Response("ok"); }, @@ -2071,7 +2141,7 @@ Deno.test( }); await listeningPromise; - const f = await Deno.open(tmpFile, { write: true, read: true }); + const f = await makeTempFile(70 * 1024); const response = await fetch(`http://localhost:4503/`, { method: "POST", body: f.readable, diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 
9691879ad6..c1de811705 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -50,3 +50,4 @@ tokio-util = { workspace = true, features = ["io"] } [dev-dependencies] bencher.workspace = true +rand.workspace = true diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index f3d37f7516..080cfea6c3 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -1,3 +1,4 @@ +use crate::compressible::is_content_compressible; // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use crate::extract_network_stream; use crate::network_buffered_stream::NetworkStreamPrefixCheck; @@ -7,17 +8,18 @@ use crate::request_properties::HttpConnectionProperties; use crate::request_properties::HttpListenProperties; use crate::request_properties::HttpPropertyExtractor; use crate::response_body::CompletionHandle; +use crate::response_body::Compression; use crate::response_body::ResponseBytes; use crate::response_body::ResponseBytesInner; use crate::response_body::V8StreamHttpResponseBody; use crate::websocket_upgrade::WebSocketUpgrade; use crate::LocalExecutor; +use cache_control::CacheControl; use deno_core::error::AnyError; use deno_core::futures::TryFutureExt; use deno_core::op; use deno_core::AsyncRefCell; use deno_core::AsyncResult; -use deno_core::BufView; use deno_core::ByteString; use deno_core::CancelFuture; use deno_core::CancelHandle; @@ -31,7 +33,15 @@ use deno_net::ops_tls::TlsStream; use deno_net::raw::put_network_stream_resource; use deno_net::raw::NetworkStream; use deno_net::raw::NetworkStreamAddress; +use fly_accept_encoding::Encoding; +use http::header::ACCEPT_ENCODING; +use http::header::CACHE_CONTROL; +use http::header::CONTENT_ENCODING; +use http::header::CONTENT_LENGTH; +use http::header::CONTENT_RANGE; +use http::header::CONTENT_TYPE; use http::request::Parts; +use http::HeaderMap; use hyper1::body::Incoming; use hyper1::header::COOKIE; use hyper1::http::HeaderName; @@ -483,6 +493,131 @@ pub fn op_http_set_response_headers( }) } 
+fn is_request_compressible(headers: &HeaderMap) -> Compression { + let Some(accept_encoding) = headers.get(ACCEPT_ENCODING) else { + return Compression::None; + }; + // Firefox and Chrome send this -- no need to parse + if accept_encoding == "gzip, deflate, br" { + return Compression::GZip; + } + if accept_encoding == "gzip" { + return Compression::GZip; + } + // Fall back to the expensive parser + let accepted = fly_accept_encoding::encodings_iter(headers).filter(|r| { + matches!(r, Ok((Some(Encoding::Identity | Encoding::Gzip), _))) + }); + #[allow(clippy::single_match)] + match fly_accept_encoding::preferred(accepted) { + Ok(Some(fly_accept_encoding::Encoding::Gzip)) => return Compression::GZip, + _ => {} + } + Compression::None +} + +fn is_response_compressible(headers: &HeaderMap) -> bool { + if let Some(content_type) = headers.get(CONTENT_TYPE) { + if !is_content_compressible(content_type) { + return false; + } + } else { + return false; + } + if headers.contains_key(CONTENT_ENCODING) { + return false; + } + if headers.contains_key(CONTENT_RANGE) { + return false; + } + if let Some(cache_control) = headers.get(CACHE_CONTROL) { + if let Ok(s) = std::str::from_utf8(cache_control.as_bytes()) { + if let Some(cache_control) = CacheControl::from_value(s) { + if cache_control.no_transform { + return false; + } + } + } + } + true +} + +fn modify_compressibility_from_response( + compression: Compression, + length: Option, + headers: &mut HeaderMap, +) -> Compression { + ensure_vary_accept_encoding(headers); + if let Some(length) = length { + // By the time we add compression headers and Accept-Encoding, it probably doesn't make sense + // to compress stuff that's smaller than this. 
+ if length < 64 { + return Compression::None; + } + } + if compression == Compression::None { + return Compression::None; + } + if !is_response_compressible(headers) { + return Compression::None; + } + weaken_etag(headers); + headers.remove(CONTENT_LENGTH); + headers.insert(CONTENT_ENCODING, HeaderValue::from_static("gzip")); + compression +} + +/// If the user provided a ETag header for uncompressed data, we need to ensure it is a +/// weak Etag header ("W/"). +fn weaken_etag(hmap: &mut HeaderMap) { + if let Some(etag) = hmap.get_mut(hyper::header::ETAG) { + if !etag.as_bytes().starts_with(b"W/") { + let mut v = Vec::with_capacity(etag.as_bytes().len() + 2); + v.extend(b"W/"); + v.extend(etag.as_bytes()); + *etag = v.try_into().unwrap(); + } + } +} + +// Set Vary: Accept-Encoding header for direct body response. +// Note: we set the header irrespective of whether or not we compress the data +// to make sure cache services do not serve uncompressed data to clients that +// support compression. 
+fn ensure_vary_accept_encoding(hmap: &mut HeaderMap) { + if let Some(v) = hmap.get_mut(hyper::header::VARY) { + if let Ok(s) = v.to_str() { + if !s.to_lowercase().contains("accept-encoding") { + *v = format!("Accept-Encoding, {s}").try_into().unwrap() + } + return; + } + } + hmap.insert( + hyper::header::VARY, + HeaderValue::from_static("Accept-Encoding"), + ); +} + +fn set_response( + index: u32, + length: Option, + response_fn: impl FnOnce(Compression) -> ResponseBytesInner, +) { + let compression = + with_req(index, |req| is_request_compressible(&req.headers)); + + with_resp_mut(index, move |response| { + let response = response.as_mut().unwrap(); + let compression = modify_compressibility_from_response( + compression, + length, + response.headers_mut(), + ); + response.body_mut().initialize(response_fn(compression)) + }); +} + #[op(fast)] pub fn op_http_set_response_body_resource( state: &mut OpState, @@ -497,14 +632,13 @@ pub fn op_http_set_response_body_resource( state.resource_table.get_any(stream_rid)? }; - with_resp_mut(index, move |response| { - let future = resource.clone().read(64 * 1024); - response - .as_mut() - .unwrap() - .body_mut() - .initialize(ResponseBytesInner::Resource(auto_close, resource, future)); - }); + set_response( + index, + resource.size_hint().1.map(|s| s as usize), + move |compression| { + ResponseBytesInner::from_resource(compression, resource, auto_close) + }, + ); Ok(()) } @@ -516,27 +650,19 @@ pub fn op_http_set_response_body_stream( ) -> Result { // TODO(mmastrac): what should this channel size be? 
let (tx, rx) = tokio::sync::mpsc::channel(1); - let (tx, rx) = ( - V8StreamHttpResponseBody::new(tx), - ResponseBytesInner::V8Stream(rx), - ); - with_resp_mut(index, move |response| { - response.as_mut().unwrap().body_mut().initialize(rx); + set_response(index, None, |compression| { + ResponseBytesInner::from_v8(compression, rx) }); - Ok(state.resource_table.add(tx)) + Ok(state.resource_table.add(V8StreamHttpResponseBody::new(tx))) } #[op(fast)] pub fn op_http_set_response_body_text(index: u32, text: String) { if !text.is_empty() { - with_resp_mut(index, move |response| { - response - .as_mut() - .unwrap() - .body_mut() - .initialize(ResponseBytesInner::Bytes(BufView::from(text.into_bytes()))) + set_response(index, Some(text.len()), |compression| { + ResponseBytesInner::from_vec(compression, text.into_bytes()) }); } } @@ -544,12 +670,8 @@ pub fn op_http_set_response_body_text(index: u32, text: String) { #[op(fast)] pub fn op_http_set_response_body_bytes(index: u32, buffer: &[u8]) { if !buffer.is_empty() { - with_resp_mut(index, |response| { - response - .as_mut() - .unwrap() - .body_mut() - .initialize(ResponseBytesInner::Bytes(BufView::from(buffer.to_vec()))) + set_response(index, Some(buffer.len()), |compression| { + ResponseBytesInner::from_slice(compression, buffer) }); }; } diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs index 0086e4d782..288d747584 100644 --- a/ext/http/response_body.rs +++ b/ext/http/response_body.rs @@ -2,12 +2,16 @@ use std::borrow::Cow; use std::cell::RefCell; use std::future::Future; +use std::io::Write; use std::pin::Pin; use std::rc::Rc; use std::task::Waker; +use bytes::Bytes; +use bytes::BytesMut; use deno_core::error::bad_resource; use deno_core::error::AnyError; +use deno_core::futures::ready; use deno_core::futures::FutureExt; use deno_core::AsyncRefCell; use deno_core::AsyncResult; @@ -17,9 +21,44 @@ use deno_core::CancelTryFuture; use deno_core::RcRef; use deno_core::Resource; use deno_core::WriteOutcome; +use 
flate2::write::GzEncoder; +use http::HeaderMap; use hyper1::body::Body; use hyper1::body::Frame; use hyper1::body::SizeHint; +use pin_project::pin_project; + +/// Simplification for nested types we use for our streams. We provide a way to convert from +/// this type into Hyper's body [`Frame`]. +enum ResponseStreamResult { + /// Stream is over. + EndOfStream, + /// Stream provided non-empty data. + NonEmptyBuf(BufView), + /// Stream is ready, but provided no data. Retry. This is a result that is like Pending, but does + /// not register a waker and should be called again at the lowest level of this code. Generally this + /// will only be returned from compression streams that require additional buffering. + NoData, + /// Stream provided trailers. + // TODO(mmastrac): We are threading trailers through the response system to eventually support Grpc. + #[allow(unused)] + Trailers(HeaderMap), + /// Stream failed. + Error(AnyError), +} + +impl From for Option, AnyError>> { + fn from(value: ResponseStreamResult) -> Self { + match value { + ResponseStreamResult::EndOfStream => None, + ResponseStreamResult::NonEmptyBuf(buf) => Some(Ok(Frame::data(buf))), + ResponseStreamResult::Error(err) => Some(Err(err)), + ResponseStreamResult::Trailers(map) => Some(Ok(Frame::trailers(map))), + // This result should be handled by retrying + ResponseStreamResult::NoData => unimplemented!(), + } + } +} #[derive(Clone, Debug, Default)] pub struct CompletionHandle { @@ -62,6 +101,28 @@ impl Future for CompletionHandle { } } +trait PollFrame: Unpin { + fn poll_frame( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll; + + fn size_hint(&self) -> SizeHint; +} + +#[derive(PartialEq, Eq)] +pub enum Compression { + None, + GZip, +} + +pub enum ResponseStream { + /// A resource stream, piped in fast mode. + Resource(ResourceBodyAdapter), + /// A JS-backed stream, written in JS and transported via pipe. 
+ V8Stream(tokio::sync::mpsc::Receiver), +} + #[derive(Default)] pub enum ResponseBytesInner { /// An empty stream. @@ -69,12 +130,12 @@ pub enum ResponseBytesInner { Empty, /// A completed stream. Done, - /// A static buffer of bytes, sent it one fell swoop. + /// A static buffer of bytes, sent in one fell swoop. Bytes(BufView), - /// A resource stream, piped in fast mode. - Resource(bool, Rc, AsyncResult), - /// A JS-backed stream, written in JS and transported via pipe. - V8Stream(tokio::sync::mpsc::Receiver), + /// An uncompressed stream. + UncompressedStream(ResponseStream), + /// A GZip stream. + GZipStream(GZipResponseStream), } impl std::fmt::Debug for ResponseBytesInner { @@ -83,8 +144,8 @@ impl std::fmt::Debug for ResponseBytesInner { Self::Done => f.write_str("Done"), Self::Empty => f.write_str("Empty"), Self::Bytes(..) => f.write_str("Bytes"), - Self::Resource(..) => f.write_str("Resource"), - Self::V8Stream(..) => f.write_str("V8Stream"), + Self::UncompressedStream(..) => f.write_str("Uncompressed"), + Self::GZipStream(..) => f.write_str("GZip"), } } } @@ -122,16 +183,54 @@ impl ResponseBytesInner { Self::Done => SizeHint::with_exact(0), Self::Empty => SizeHint::with_exact(0), Self::Bytes(bytes) => SizeHint::with_exact(bytes.len() as u64), - Self::Resource(_, res, _) => { - let hint = res.size_hint(); - let mut size_hint = SizeHint::new(); - size_hint.set_lower(hint.0); - if let Some(upper) = hint.1 { - size_hint.set_upper(upper) - } - size_hint - } - Self::V8Stream(..) => SizeHint::default(), + Self::UncompressedStream(res) => res.size_hint(), + Self::GZipStream(..) 
=> SizeHint::default(), + } + } + + fn from_stream(compression: Compression, stream: ResponseStream) -> Self { + if compression == Compression::GZip { + Self::GZipStream(GZipResponseStream::new(stream)) + } else { + Self::UncompressedStream(stream) + } + } + + pub fn from_v8( + compression: Compression, + rx: tokio::sync::mpsc::Receiver, + ) -> Self { + Self::from_stream(compression, ResponseStream::V8Stream(rx)) + } + + pub fn from_resource( + compression: Compression, + stm: Rc, + auto_close: bool, + ) -> Self { + Self::from_stream( + compression, + ResponseStream::Resource(ResourceBodyAdapter::new(stm, auto_close)), + ) + } + + pub fn from_slice(compression: Compression, bytes: &[u8]) -> Self { + if compression == Compression::GZip { + let mut writer = GzEncoder::new(Vec::new(), flate2::Compression::fast()); + writer.write_all(bytes).unwrap(); + Self::Bytes(BufView::from(writer.finish().unwrap())) + } else { + Self::Bytes(BufView::from(bytes.to_vec())) + } + } + + pub fn from_vec(compression: Compression, vec: Vec) -> Self { + if compression == Compression::GZip { + let mut writer = GzEncoder::new(Vec::new(), flate2::Compression::fast()); + writer.write_all(&vec).unwrap(); + Self::Bytes(BufView::from(writer.finish().unwrap())) + } else { + Self::Bytes(BufView::from(vec)) } } } @@ -144,48 +243,33 @@ impl Body for ResponseBytes { mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll, Self::Error>>> { - match &mut self.0 { - ResponseBytesInner::Done | ResponseBytesInner::Empty => { - unreachable!() - } - ResponseBytesInner::Bytes(..) => { - if let ResponseBytesInner::Bytes(data) = self.complete(true) { - std::task::Poll::Ready(Some(Ok(Frame::data(data)))) - } else { + let res = loop { + let res = match &mut self.0 { + ResponseBytesInner::Done | ResponseBytesInner::Empty => { unreachable!() } + ResponseBytesInner::Bytes(..) 
=> { + let ResponseBytesInner::Bytes(data) = self.complete(true) else { unreachable!(); }; + return std::task::Poll::Ready(Some(Ok(Frame::data(data)))); + } + ResponseBytesInner::UncompressedStream(stm) => { + ready!(Pin::new(stm).poll_frame(cx)) + } + ResponseBytesInner::GZipStream(stm) => { + ready!(Pin::new(stm).poll_frame(cx)) + } + }; + // This is where we retry the NoData response + if matches!(res, ResponseStreamResult::NoData) { + continue; } - ResponseBytesInner::Resource(auto_close, stm, ref mut future) => { - match future.poll_unpin(cx) { - std::task::Poll::Pending => std::task::Poll::Pending, - std::task::Poll::Ready(Err(err)) => { - std::task::Poll::Ready(Some(Err(err))) - } - std::task::Poll::Ready(Ok(buf)) => { - if buf.is_empty() { - if *auto_close { - stm.clone().close(); - } - self.complete(true); - return std::task::Poll::Ready(None); - } - // Re-arm the future - *future = stm.clone().read(64 * 1024); - std::task::Poll::Ready(Some(Ok(Frame::data(buf)))) - } - } - } - ResponseBytesInner::V8Stream(stm) => match stm.poll_recv(cx) { - std::task::Poll::Pending => std::task::Poll::Pending, - std::task::Poll::Ready(Some(buf)) => { - std::task::Poll::Ready(Some(Ok(Frame::data(buf)))) - } - std::task::Poll::Ready(None) => { - self.complete(true); - std::task::Poll::Ready(None) - } - }, + break res; + }; + + if matches!(res, ResponseStreamResult::EndOfStream) { + self.complete(true); } + std::task::Poll::Ready(res.into()) } fn is_end_stream(&self) -> bool { @@ -206,6 +290,243 @@ impl Drop for ResponseBytes { } } +pub struct ResourceBodyAdapter { + auto_close: bool, + stm: Rc, + future: AsyncResult, +} + +impl ResourceBodyAdapter { + pub fn new(stm: Rc, auto_close: bool) -> Self { + let future = stm.clone().read(64 * 1024); + ResourceBodyAdapter { + auto_close, + stm, + future, + } + } +} + +impl PollFrame for ResponseStream { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + match &mut *self { + 
ResponseStream::Resource(res) => Pin::new(res).poll_frame(cx), + ResponseStream::V8Stream(res) => Pin::new(res).poll_frame(cx), + } + } + + fn size_hint(&self) -> SizeHint { + match self { + ResponseStream::Resource(res) => res.size_hint(), + ResponseStream::V8Stream(res) => res.size_hint(), + } + } +} + +impl PollFrame for ResourceBodyAdapter { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let res = match ready!(self.future.poll_unpin(cx)) { + Err(err) => ResponseStreamResult::Error(err), + Ok(buf) => { + if buf.is_empty() { + if self.auto_close { + self.stm.clone().close(); + } + ResponseStreamResult::EndOfStream + } else { + // Re-arm the future + self.future = self.stm.clone().read(64 * 1024); + ResponseStreamResult::NonEmptyBuf(buf) + } + } + }; + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + let hint = self.stm.size_hint(); + let mut size_hint = SizeHint::new(); + size_hint.set_lower(hint.0); + if let Some(upper) = hint.1 { + size_hint.set_upper(upper) + } + size_hint + } +} + +impl PollFrame for tokio::sync::mpsc::Receiver { + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let res = match ready!(self.poll_recv(cx)) { + Some(buf) => ResponseStreamResult::NonEmptyBuf(buf), + None => ResponseStreamResult::EndOfStream, + }; + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + SizeHint::default() + } +} + +#[derive(Copy, Clone, Debug)] +enum GZipState { + Header, + Streaming, + Flushing, + Trailer, + EndOfStream, +} + +#[pin_project] +pub struct GZipResponseStream { + stm: flate2::Compress, + crc: flate2::Crc, + next_buf: Option, + partial: Option, + #[pin] + underlying: ResponseStream, + state: GZipState, +} + +impl GZipResponseStream { + pub fn new(underlying: ResponseStream) -> Self { + Self { + stm: flate2::Compress::new(flate2::Compression::fast(), false), + crc: flate2::Crc::new(), + next_buf: 
None, + partial: None, + state: GZipState::Header, + underlying, + } + } +} + +/// This is a minimal GZip header suitable for serving data from a webserver. We don't need to provide +/// most of the information. We're skipping header name, CRC, etc, and providing a null timestamp. +/// +/// We're using compression level 1, as higher levels don't produce significant size differences. This +/// is probably the reason why nginx's default gzip compression level is also 1: +/// +/// https://nginx.org/en/docs/http/ngx_http_gzip_module.html#gzip_comp_level +static GZIP_HEADER: Bytes = + Bytes::from_static(&[0x1f, 0x8b, 0x08, 0, 0, 0, 0, 0, 0x01, 0xff]); + +impl PollFrame for GZipResponseStream { + fn poll_frame( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.get_mut(); + let state = &mut this.state; + let orig_state = *state; + let frame = match *state { + GZipState::EndOfStream => { + return std::task::Poll::Ready(ResponseStreamResult::EndOfStream) + } + GZipState::Header => { + *state = GZipState::Streaming; + return std::task::Poll::Ready(ResponseStreamResult::NonEmptyBuf( + BufView::from(GZIP_HEADER.clone()), + )); + } + GZipState::Trailer => { + *state = GZipState::EndOfStream; + let mut v = Vec::with_capacity(8); + v.extend(&this.crc.sum().to_le_bytes()); + v.extend(&this.crc.amount().to_le_bytes()); + return std::task::Poll::Ready(ResponseStreamResult::NonEmptyBuf( + BufView::from(v), + )); + } + GZipState::Streaming => { + if let Some(partial) = this.partial.take() { + ResponseStreamResult::NonEmptyBuf(partial) + } else { + ready!(Pin::new(&mut this.underlying).poll_frame(cx)) + } + } + GZipState::Flushing => ResponseStreamResult::EndOfStream, + }; + + let stm = &mut this.stm; + + // Ideally we could use MaybeUninit here, but flate2 requires &[u8]. We should also try + // to dynamically adjust this buffer. 
+ let mut buf = this + .next_buf + .take() + .unwrap_or_else(|| BytesMut::zeroed(64 * 1024)); + + let start_in = stm.total_in(); + let start_out = stm.total_out(); + let res = match frame { + // Short-circuit these and just return + x @ (ResponseStreamResult::NoData + | ResponseStreamResult::Error(..) + | ResponseStreamResult::Trailers(..)) => { + return std::task::Poll::Ready(x) + } + ResponseStreamResult::EndOfStream => { + *state = GZipState::Flushing; + stm.compress(&[], &mut buf, flate2::FlushCompress::Finish) + } + ResponseStreamResult::NonEmptyBuf(mut input) => { + let res = stm.compress(&input, &mut buf, flate2::FlushCompress::None); + let len_in = (stm.total_in() - start_in) as usize; + debug_assert!(len_in <= input.len()); + this.crc.update(&input[..len_in]); + if len_in < input.len() { + input.advance_cursor(len_in); + this.partial = Some(input); + } + res + } + }; + let len = stm.total_out() - start_out; + let res = match res { + Err(err) => ResponseStreamResult::Error(err.into()), + Ok(flate2::Status::BufError) => { + // This should not happen + unreachable!("old={orig_state:?} new={state:?} buf_len={}", buf.len()); + } + Ok(flate2::Status::Ok) => { + if len == 0 { + this.next_buf = Some(buf); + ResponseStreamResult::NoData + } else { + buf.truncate(len as usize); + ResponseStreamResult::NonEmptyBuf(BufView::from(buf.freeze())) + } + } + Ok(flate2::Status::StreamEnd) => { + *state = GZipState::Trailer; + if len == 0 { + this.next_buf = Some(buf); + ResponseStreamResult::NoData + } else { + buf.truncate(len as usize); + ResponseStreamResult::NonEmptyBuf(BufView::from(buf.freeze())) + } + } + }; + + std::task::Poll::Ready(res) + } + + fn size_hint(&self) -> SizeHint { + SizeHint::default() + } +} + /// A response body object that can be passed to V8. This body will feed byte buffers to a channel which /// feed's hyper's HTTP response. 
pub struct V8StreamHttpResponseBody( @@ -251,3 +572,179 @@ impl Resource for V8StreamHttpResponseBody { self.1.cancel(); } } + +#[cfg(test)] +mod tests { + use super::*; + use deno_core::futures::future::poll_fn; + use std::hash::Hasher; + use std::io::Read; + use std::io::Write; + + fn zeros() -> Vec { + vec![0; 1024 * 1024] + } + + fn hard_to_gzip_data() -> Vec { + const SIZE: usize = 1024 * 1024; + let mut v = Vec::with_capacity(SIZE); + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + for i in 0..SIZE { + hasher.write_usize(i); + v.push(hasher.finish() as u8); + } + v + } + + fn already_gzipped_data() -> Vec { + let mut v = Vec::with_capacity(1024 * 1024); + let mut gz = + flate2::GzBuilder::new().write(&mut v, flate2::Compression::best()); + gz.write_all(&hard_to_gzip_data()).unwrap(); + _ = gz.finish().unwrap(); + v + } + + fn chunk(v: Vec) -> impl Iterator> { + // Chunk the data into 10k + let mut out = vec![]; + for v in v.chunks(10 * 1024) { + out.push(v.to_vec()); + } + out.into_iter() + } + + fn random(mut v: Vec) -> impl Iterator> { + let mut out = vec![]; + loop { + if v.is_empty() { + break; + } + let rand = (rand::random::() % v.len()) + 1; + let new = v.split_off(rand); + out.push(v); + v = new; + } + // Print the lengths of the vectors if we actually fail this test at some point + let lengths = out.iter().map(|v| v.len()).collect::>(); + eprintln!("Lengths = {:?}", lengths); + out.into_iter() + } + + fn front_load(mut v: Vec) -> impl Iterator> { + // Chunk the data at 90% + let offset = (v.len() * 90) / 100; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + fn front_load_but_one(mut v: Vec) -> impl Iterator> { + let offset = v.len() - 1; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + fn back_load(mut v: Vec) -> impl Iterator> { + // Chunk the data at 10% + let offset = (v.len() * 10) / 100; + let v2 = v.split_off(offset); + vec![v, v2].into_iter() + } + + async fn test(i: impl Iterator> + Send 
+ 'static) { + let v = i.collect::>(); + let mut expected: Vec = vec![]; + for v in &v { + expected.extend(v); + } + let (tx, rx) = tokio::sync::mpsc::channel(1); + let underlying = ResponseStream::V8Stream(rx); + let mut resp = GZipResponseStream::new(underlying); + let handle = tokio::task::spawn(async move { + for chunk in v { + tx.send(chunk.into()).await.ok().unwrap(); + } + }); + // Limit how many times we'll loop + const LIMIT: usize = 1000; + let mut v: Vec = vec![]; + for i in 0..=LIMIT { + assert_ne!(i, LIMIT); + let frame = poll_fn(|cx| Pin::new(&mut resp).poll_frame(cx)).await; + if matches!(frame, ResponseStreamResult::EndOfStream) { + break; + } + if matches!(frame, ResponseStreamResult::NoData) { + continue; + } + let ResponseStreamResult::NonEmptyBuf(buf) = frame else { + panic!("Unexpected stream type"); + }; + assert_ne!(buf.len(), 0); + v.extend(&*buf); + } + + let mut gz = flate2::read::GzDecoder::new(&*v); + let mut v = vec![]; + gz.read_to_end(&mut v).unwrap(); + + assert_eq!(v, expected); + + handle.await.unwrap(); + } + + #[tokio::test] + async fn test_simple() { + test(vec![b"hello world".to_vec()].into_iter()).await + } + + #[tokio::test] + async fn test_empty() { + test(vec![].into_iter()).await + } + + #[tokio::test] + async fn test_simple_zeros() { + test(vec![vec![0; 0x10000]].into_iter()).await + } + + macro_rules! 
test { + ($vec:ident) => { + mod $vec { + #[tokio::test] + async fn chunk() { + let iter = super::chunk(super::$vec()); + super::test(iter).await; + } + + #[tokio::test] + async fn front_load() { + let iter = super::front_load(super::$vec()); + super::test(iter).await; + } + + #[tokio::test] + async fn front_load_but_one() { + let iter = super::front_load_but_one(super::$vec()); + super::test(iter).await; + } + + #[tokio::test] + async fn back_load() { + let iter = super::back_load(super::$vec()); + super::test(iter).await; + } + + #[tokio::test] + async fn random() { + let iter = super::random(super::$vec()); + super::test(iter).await; + } + } + }; + } + + test!(zeros); + test!(hard_to_gzip_data); + test!(already_gzipped_data); +} From 29aa988476ee968ea00f9c026ae276d72c316476 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Wed, 10 May 2023 16:23:26 +0200 Subject: [PATCH 143/320] refactor(core): http_next generic over request extractor (#19071) --- cli/build.rs | 3 +- ext/http/http_next.rs | 79 +++++++++++++++++----------------- ext/http/lib.rs | 10 +++-- ext/http/request_properties.rs | 4 +- runtime/build.rs | 3 +- runtime/web_worker.rs | 3 +- runtime/worker.rs | 3 +- 7 files changed, 57 insertions(+), 48 deletions(-) diff --git a/cli/build.rs b/cli/build.rs index 94b49dfe02..560c8ceae2 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -9,6 +9,7 @@ use deno_core::Extension; use deno_core::ExtensionFileSource; use deno_core::ExtensionFileSourceCode; use deno_runtime::deno_cache::SqliteBackedCache; +use deno_runtime::deno_http::DefaultHttpPropertyExtractor; use deno_runtime::deno_kv::sqlite::SqliteDbHandler; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::*; @@ -361,7 +362,7 @@ fn create_cli_snapshot(snapshot_path: PathBuf) { false, // No --unstable. 
), deno_napi::deno_napi::init_ops::(), - deno_http::deno_http::init_ops(), + deno_http::deno_http::init_ops::(), deno_io::deno_io::init_ops(Default::default()), deno_fs::deno_fs::init_ops::(false, fs.clone()), deno_node::deno_node::init_ops::(None, fs), diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 080cfea6c3..12db29b1b7 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -1,9 +1,8 @@ -use crate::compressible::is_content_compressible; // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use crate::compressible::is_content_compressible; use crate::extract_network_stream; use crate::network_buffered_stream::NetworkStreamPrefixCheck; use crate::request_body::HttpRequestBody; -use crate::request_properties::DefaultHttpRequestProperties; use crate::request_properties::HttpConnectionProperties; use crate::request_properties::HttpListenProperties; use crate::request_properties::HttpPropertyExtractor; @@ -375,12 +374,15 @@ pub fn op_http_set_promise_complete(index: u32, status: u16) { } #[op] -pub fn op_http_get_request_method_and_url( +pub fn op_http_get_request_method_and_url( index: u32, -) -> (String, Option, String, String, Option) { +) -> (String, Option, String, String, Option) +where + HTTP: HttpPropertyExtractor, +{ // TODO(mmastrac): Passing method can be optimized with_http(index, |http| { - let request_properties = DefaultHttpRequestProperties::request_properties( + let request_properties = HTTP::request_properties( &http.request_info, &http.request_parts.uri, &http.request_parts.headers, @@ -825,12 +827,15 @@ fn serve_http( spawn_local(serve_http2_autodetect(io, svc).try_or_cancel(cancel)) } -fn serve_http_on( +fn serve_http_on( network_stream: NetworkStream, listen_properties: &HttpListenProperties, cancel: Rc, tx: tokio::sync::mpsc::Sender, -) -> JoinHandle> { +) -> JoinHandle> +where + HTTP: HttpPropertyExtractor, +{ // We always want some sort of peer address. If we can't get one, just make up one. 
let peer_address = network_stream.peer_address().unwrap_or_else(|_| { NetworkStreamAddress::Ip(SocketAddr::V4(SocketAddrV4::new( @@ -839,10 +844,7 @@ fn serve_http_on( ))) }); let connection_properties: HttpConnectionProperties = - DefaultHttpRequestProperties::connection_properties( - listen_properties, - &peer_address, - ); + HTTP::connection_properties(listen_properties, &peer_address); match network_stream { NetworkStream::Tcp(conn) => { @@ -889,21 +891,21 @@ impl Drop for HttpJoinHandle { } #[op(v8)] -pub fn op_http_serve( +pub fn op_http_serve( state: Rc>, listener_rid: ResourceId, -) -> Result<(ResourceId, &'static str, String), AnyError> { - let listener = - DefaultHttpRequestProperties::get_network_stream_listener_for_rid( - &mut state.borrow_mut(), - listener_rid, - )?; +) -> Result<(ResourceId, &'static str, String), AnyError> +where + HTTP: HttpPropertyExtractor, +{ + let listener = HTTP::get_network_stream_listener_for_rid( + &mut state.borrow_mut(), + listener_rid, + )?; let local_address = listener.listen_address()?; - let listen_properties = DefaultHttpRequestProperties::listen_properties( - listener.stream(), - &local_address, - ); + let listen_properties = + HTTP::listen_properties(listener.stream(), &local_address); let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( @@ -920,7 +922,7 @@ pub fn op_http_serve( .accept() .try_or_cancel(cancel_clone.clone()) .await?; - serve_http_on( + serve_http_on::( conn, &listen_properties_clone, cancel_clone.clone(), @@ -944,21 +946,19 @@ pub fn op_http_serve( } #[op(v8)] -pub fn op_http_serve_on( +pub fn op_http_serve_on( state: Rc>, conn: ResourceId, -) -> Result<(ResourceId, &'static str, String), AnyError> { +) -> Result<(ResourceId, &'static str, String), AnyError> +where + HTTP: HttpPropertyExtractor, +{ let network_stream: NetworkStream = - DefaultHttpRequestProperties::get_network_stream_for_rid( - &mut state.borrow_mut(), - conn, - )?; + 
HTTP::get_network_stream_for_rid(&mut state.borrow_mut(), conn)?; let local_address = network_stream.local_address()?; - let listen_properties = DefaultHttpRequestProperties::listen_properties( - network_stream.stream(), - &local_address, - ); + let listen_properties = + HTTP::listen_properties(network_stream.stream(), &local_address); let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( @@ -967,12 +967,13 @@ pub fn op_http_serve_on( AsyncRefCell::new(rx), )); - let handle: JoinHandle> = serve_http_on( - network_stream, - &listen_properties, - resource.cancel_handle(), - tx, - ); + let handle: JoinHandle> = + serve_http_on::( + network_stream, + &listen_properties, + resource.cancel_handle(), + tx, + ); // Set the handle after we start the future *RcRef::map(&resource, |this| &this.0) diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 6dab375a1a..3eb5c89c0a 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -73,6 +73,7 @@ use tokio::task::spawn_local; use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; use crate::reader_stream::ShutdownHandle; +use crate::request_properties::HttpPropertyExtractor; pub mod compressible; mod http_next; @@ -83,9 +84,12 @@ mod request_properties; mod response_body; mod websocket_upgrade; +pub use request_properties::DefaultHttpPropertyExtractor; + deno_core::extension!( deno_http, deps = [deno_web, deno_net, deno_fetch, deno_websocket], + parameters = [ HTTP: HttpPropertyExtractor ], ops = [ op_http_accept, op_http_headers, @@ -97,10 +101,10 @@ deno_core::extension!( op_http_write, http_next::op_http_get_request_header, http_next::op_http_get_request_headers, - http_next::op_http_get_request_method_and_url, + http_next::op_http_get_request_method_and_url, http_next::op_http_read_request_body, - http_next::op_http_serve_on, - http_next::op_http_serve, + http_next::op_http_serve_on, + http_next::op_http_serve, 
http_next::op_http_set_promise_complete, http_next::op_http_set_response_body_bytes, http_next::op_http_set_response_body_resource, diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs index 7a7f5219c0..9c0c0e8152 100644 --- a/ext/http/request_properties.rs +++ b/ext/http/request_properties.rs @@ -70,9 +70,9 @@ pub trait HttpPropertyExtractor { ) -> HttpRequestProperties; } -pub struct DefaultHttpRequestProperties {} +pub struct DefaultHttpPropertyExtractor {} -impl HttpPropertyExtractor for DefaultHttpRequestProperties { +impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { fn get_network_stream_for_rid( state: &mut OpState, rid: ResourceId, diff --git a/runtime/build.rs b/runtime/build.rs index 18aaf7a7f0..412257f122 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -18,6 +18,7 @@ mod startup_snapshot { use deno_core::Extension; use deno_core::ExtensionFileSource; use deno_core::ModuleCode; + use deno_http::DefaultHttpPropertyExtractor; use std::path::Path; fn transpile_ts_for_snapshotting( @@ -319,7 +320,7 @@ mod startup_snapshot { false, // No --unstable ), deno_napi::deno_napi::init_ops_and_esm::(), - deno_http::deno_http::init_ops_and_esm(), + deno_http::deno_http::init_ops_and_esm::(), deno_io::deno_io::init_ops_and_esm(Default::default()), deno_fs::deno_fs::init_ops_and_esm::(false, fs.clone()), runtime::init_ops_and_esm(), diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 6487239f8b..d8c881ab7c 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -35,6 +35,7 @@ use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; use deno_fs::FileSystem; +use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; use deno_tls::RootCertStoreProvider; @@ -439,7 +440,7 @@ impl WebWorker { unstable, ), deno_napi::deno_napi::init_ops::(), - deno_http::deno_http::init_ops(), + deno_http::deno_http::init_ops::(), 
deno_io::deno_io::init_ops(Some(options.stdio)), deno_fs::deno_fs::init_ops::( unstable, diff --git a/runtime/worker.rs b/runtime/worker.rs index 77f16553b6..ae6bd717f6 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -31,6 +31,7 @@ use deno_core::SharedArrayBufferStore; use deno_core::Snapshot; use deno_core::SourceMapGetter; use deno_fs::FileSystem; +use deno_http::DefaultHttpPropertyExtractor; use deno_io::Stdio; use deno_kv::sqlite::SqliteDbHandler; use deno_tls::RootCertStoreProvider; @@ -264,7 +265,7 @@ impl MainWorker { unstable, ), deno_napi::deno_napi::init_ops::(), - deno_http::deno_http::init_ops(), + deno_http::deno_http::init_ops::(), deno_io::deno_io::init_ops(Some(options.stdio)), deno_fs::deno_fs::init_ops::( unstable, From b07535cd2e9c32c778118d195534f90435ff8b95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 10 May 2023 17:48:17 +0200 Subject: [PATCH 144/320] chore(core): fix a warning (#19072) --- core/modules.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/core/modules.rs b/core/modules.rs index d1e871ba90..2acc146840 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -1093,6 +1093,7 @@ impl ModuleMap { output } + #[cfg(debug_assertions)] pub(crate) fn assert_all_modules_evaluated( &self, scope: &mut v8::HandleScope, From eb374e8cd34461793aa889e74544e16a86e3f7d4 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Wed, 10 May 2023 18:04:01 +0200 Subject: [PATCH 145/320] refactor(ext/http): HTTP trait structs need to be public (#19075) --- ext/http/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 3eb5c89c0a..21d3dc6519 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -73,7 +73,6 @@ use tokio::task::spawn_local; use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; use crate::reader_stream::ShutdownHandle; -use crate::request_properties::HttpPropertyExtractor; pub mod 
compressible; mod http_next; @@ -85,6 +84,10 @@ mod response_body; mod websocket_upgrade; pub use request_properties::DefaultHttpPropertyExtractor; +pub use request_properties::HttpConnectionProperties; +pub use request_properties::HttpListenProperties; +pub use request_properties::HttpPropertyExtractor; +pub use request_properties::HttpRequestProperties; deno_core::extension!( deno_http, From d55e07f6274d45a027885453ea9f80475e6d9393 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Wed, 10 May 2023 21:35:42 +0200 Subject: [PATCH 146/320] chore(core): Parallelize all WPT tests and reduce timeouts for expected failures (#19061) This speeds up WPT tests in two ways: 1. The `WebCryptoAPI` tests are slow, so create a parallel bucket for each individual test instead of one for all the `WebCryptoAPI` tests. 2. If a test is expected to fail, use a shorter timeout (1 minute rather than 4). --- tools/wpt.ts | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/tools/wpt.ts b/tools/wpt.ts index a3426c5b87..fe2a350b27 100755 --- a/tools/wpt.ts +++ b/tools/wpt.ts @@ -145,6 +145,18 @@ interface TestToRun { expectation: boolean | string[]; } +function getTestTimeout(test: TestToRun) { + if (Deno.env.get("CI")) { + // Don't give expected failures the full time + if (test.expectation === false) { + return { long: 60_000, default: 10_000 }; + } + return { long: 4 * 60_000, default: 4 * 60_000 }; + } + + return { long: 60_000, default: 10_000 }; +} + async function run() { const startTime = new Date().getTime(); assert(Array.isArray(rest), "filter must be array"); @@ -154,11 +166,11 @@ async function run() { expectation, ); assertAllExpectationsHaveTests(expectation, tests, rest); - console.log(`Going to run ${tests.length} test files.`); + const cores = navigator.hardwareConcurrency; + console.log(`Going to run ${tests.length} test files on ${cores} cores.`); const results = await runWithTestUtil(false, async () => { const results: { test: 
TestToRun; result: TestResult }[] = []; - const cores = navigator.hardwareConcurrency; const inParallel = !(cores === 1 || tests.length === 1); // ideally we would parallelize all tests, but we ran into some flakiness // on the CI, so here we're partitioning based on the start of the test path @@ -174,9 +186,7 @@ async function run() { test.options, inParallel ? () => {} : createReportTestCase(test.expectation), inspectBrk, - Deno.env.get("CI") - ? { long: 4 * 60_000, default: 4 * 60_000 } - : { long: 60_000, default: 10_000 }, + getTestTimeout(test), ); results.push({ test, result }); if (inParallel) { @@ -755,6 +765,11 @@ function discoverTestsToRun( function partitionTests(tests: TestToRun[]): TestToRun[][] { const testsByKey: { [key: string]: TestToRun[] } = {}; for (const test of tests) { + // Run all WebCryptoAPI tests in parallel + if (test.path.includes("/WebCryptoAPI")) { + testsByKey[test.path] = [test]; + continue; + } // Paths looks like: /fetch/corb/img-html-correctly-labeled.sub-ref.html const key = test.path.split("/")[1]; if (!(key in testsByKey)) { From ec67e96a124398a2eb12ed8721076ad0147be113 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 10 May 2023 16:04:30 -0400 Subject: [PATCH 147/320] fix(dts): align `seekSync` `position` arg with `seek` (#19077) Closes #19060 --- cli/tsc/dts/lib.deno.ns.d.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 395f8c667d..4d8c9293e7 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -1565,7 +1565,7 @@ declare namespace Deno { * * It returns the updated offset. 
*/ - seekSync(offset: number, whence: SeekMode): number; + seekSync(offset: number | bigint, whence: SeekMode): number; } /** @@ -1893,7 +1893,7 @@ declare namespace Deno { */ export function seekSync( rid: number, - offset: number, + offset: number | bigint, whence: SeekMode, ): number; From e72485fb1776d2ffebd90ff716374edfba42d603 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 10 May 2023 23:55:48 +0200 Subject: [PATCH 148/320] fix(node): conditional exports edge case (#19082) Fixes https://github.com/denoland/deno/issues/18743 --- cli/tests/testdata/npm/conditional_exports/main.js | 2 ++ cli/tests/testdata/npm/conditional_exports/main.out | 1 + .../testdata/npm/conditional_exports/main_node_modules.out | 1 + .../@denotest/conditional-exports/1.0.0/esm/client/m.js | 3 +++ ext/node/resolution.rs | 2 +- 5 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 cli/tests/testdata/npm/registry/@denotest/conditional-exports/1.0.0/esm/client/m.js diff --git a/cli/tests/testdata/npm/conditional_exports/main.js b/cli/tests/testdata/npm/conditional_exports/main.js index eb243a52c7..52b78bc22d 100644 --- a/cli/tests/testdata/npm/conditional_exports/main.js +++ b/cli/tests/testdata/npm/conditional_exports/main.js @@ -3,6 +3,7 @@ import foo from "npm:@denotest/conditional-exports/foo.js"; import client from "npm:@denotest/conditional-exports/client"; import clientFoo from "npm:@denotest/conditional-exports/client/foo"; import clientBar from "npm:@denotest/conditional-exports/client/bar"; +import clientM from "npm:@denotest/conditional-exports/client/m"; import supportsESM from "npm:supports-esm"; console.log(mod); @@ -10,4 +11,5 @@ console.log(foo); console.log(client); console.log(clientFoo); console.log(clientBar); +console.log(clientM); console.log(supportsESM); diff --git a/cli/tests/testdata/npm/conditional_exports/main.out b/cli/tests/testdata/npm/conditional_exports/main.out index b374d9f6c9..9f65c1f9a6 100644 --- 
a/cli/tests/testdata/npm/conditional_exports/main.out +++ b/cli/tests/testdata/npm/conditional_exports/main.out @@ -11,4 +11,5 @@ Download http://localhost:4545/npm/registry/supports-esm/supports-esm-1.0.0.tgz { hello: "from esm client" } { hello: "from esm client foo" } { hello: "from esm client bar" } +{ hello: "from esm client m" } true diff --git a/cli/tests/testdata/npm/conditional_exports/main_node_modules.out b/cli/tests/testdata/npm/conditional_exports/main_node_modules.out index 525f31d5c6..94ce955812 100644 --- a/cli/tests/testdata/npm/conditional_exports/main_node_modules.out +++ b/cli/tests/testdata/npm/conditional_exports/main_node_modules.out @@ -15,4 +15,5 @@ Initialize supports-esm@1.0.0 { hello: "from esm client" } { hello: "from esm client foo" } { hello: "from esm client bar" } +{ hello: "from esm client m" } true diff --git a/cli/tests/testdata/npm/registry/@denotest/conditional-exports/1.0.0/esm/client/m.js b/cli/tests/testdata/npm/registry/@denotest/conditional-exports/1.0.0/esm/client/m.js new file mode 100644 index 0000000000..40e769031a --- /dev/null +++ b/cli/tests/testdata/npm/registry/@denotest/conditional-exports/1.0.0/esm/client/m.js @@ -0,0 +1,3 @@ +export default { + hello: "from esm client m", +} \ No newline at end of file diff --git a/ext/node/resolution.rs b/ext/node/resolution.rs index 16720f22c0..9664915052 100644 --- a/ext/node/resolution.rs +++ b/ext/node/resolution.rs @@ -922,7 +922,7 @@ impl NodeResolver { // emitTrailingSlashPatternDeprecation(); } let pattern_trailer = &key[pattern_index + 1..]; - if package_subpath.len() > key.len() + if package_subpath.len() >= key.len() && package_subpath.ends_with(&pattern_trailer) && pattern_key_compare(best_match, key) == 1 && key.rfind('*') == Some(pattern_index) From 5fd74bfa1c5ed514c3e19fdb2e8590fe251d3ee6 Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Thu, 11 May 2023 00:13:45 +0200 Subject: [PATCH 149/320] feat(node): add `Module.runMain()` (#19080) This PR adds the 
missing `Module.runMain()` function which is required for tools like `ts-node`. Fixes #19033 --- cli/tests/node_compat/config.jsonc | 1 + cli/tests/node_compat/test/fixtures/run-main.js | 1 + .../test/parallel/test-module-run-main.js | 15 +++++++++++++++ ext/node/polyfills/01_require.js | 5 +++++ 4 files changed, 22 insertions(+) create mode 100644 cli/tests/node_compat/test/fixtures/run-main.js create mode 100644 cli/tests/node_compat/test/parallel/test-module-run-main.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 81463bcaf5..87530d4f5c 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -366,6 +366,7 @@ "test-http-outgoing-message-inheritance.js", "test-http-outgoing-renderHeaders.js", "test-http-outgoing-settimeout.js", + "test-module-run-main.js", "test-net-access-byteswritten.js", "test-net-better-error-messages-listen-path.js", "test-net-better-error-messages-path.js", diff --git a/cli/tests/node_compat/test/fixtures/run-main.js b/cli/tests/node_compat/test/fixtures/run-main.js new file mode 100644 index 0000000000..9a081cbbae --- /dev/null +++ b/cli/tests/node_compat/test/fixtures/run-main.js @@ -0,0 +1 @@ +globalThis.foo = 42; diff --git a/cli/tests/node_compat/test/parallel/test-module-run-main.js b/cli/tests/node_compat/test/parallel/test-module-run-main.js new file mode 100644 index 0000000000..8e30de2671 --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-module-run-main.js @@ -0,0 +1,15 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +"use strict"; + +const Module = require("module"); +const assert = require("assert/strict"); +const path = require("path"); + +const file = path.join(__dirname, "..", "fixtures", "run-main.js"); +process.argv = [process.argv[0], file]; +Module.runMain(); + +// The required file via `Module.runMain()` sets this global +assert.equal(globalThis.foo, 42); diff --git a/ext/node/polyfills/01_require.js 
b/ext/node/polyfills/01_require.js index c73701ba80..a8a70c2fca 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -1108,6 +1108,11 @@ Module.syncBuiltinESMExports = function syncBuiltinESMExports() { throw new Error("not implemented"); }; +// Mostly used by tools like ts-node. +Module.runMain = function () { + Module._load(process.argv[1], null, true); +}; + Module.Module = Module; nativeModuleExports.module = Module; From 28aa489de9cd4f995ec2fc02e2c9d224e89f4c01 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 10 May 2023 20:06:59 -0400 Subject: [PATCH 150/320] feat(compile): unstable npm and node specifier support (#19005) This is the initial support for npm and node specifiers in `deno compile`. The npm packages are included in the binary and read from it via a virtual file system. This also supports the `--node-modules-dir` flag, dependencies specified in a package.json, and npm binary commands (ex. `deno compile --unstable npm:cowsay`) Closes #16632 --- Cargo.lock | 4 +- cli/Cargo.toml | 2 +- cli/args/flags.rs | 7 +- cli/args/mod.rs | 13 +- cli/args/package_json.rs | 27 + cli/factory.rs | 50 +- cli/graph_util.rs | 4 +- cli/lsp/documents.rs | 6 +- cli/lsp/language_server.rs | 11 +- cli/main.rs | 3 +- cli/module_loader.rs | 76 +- cli/npm/installer.rs | 26 +- cli/npm/resolution.rs | 4 + cli/npm/resolvers/common.rs | 6 +- cli/npm/resolvers/global.rs | 6 +- cli/npm/resolvers/local.rs | 5 +- cli/npm/resolvers/mod.rs | 26 +- cli/resolver.rs | 116 ++- cli/standalone/binary.rs | 314 +++++- cli/standalone/file_system.rs | 337 ++++++ cli/standalone/mod.rs | 255 ++++- cli/standalone/virtual_fs.rs | 983 ++++++++++++++++++ cli/tests/integration/compile_tests.rs | 305 +++++- cli/tests/integration/npm_tests.rs | 10 +- cli/tests/testdata/compile/npm_fs/main.out | 1 + cli/tests/testdata/compile/npm_fs/main.ts | 259 +++++ .../@denotest/esm-basic/1.0.0/main.d.mts | 1 + .../@denotest/esm-basic/1.0.0/main.mjs | 2 + 
.../testdata/package_json/basic/main.info.out | 2 +- cli/tools/{standalone.rs => compile.rs} | 15 +- cli/tools/mod.rs | 2 +- cli/tools/task.rs | 3 +- cli/tools/vendor/test.rs | 9 +- cli/util/fs.rs | 9 +- ext/fs/interface.rs | 2 +- ext/io/fs.rs | 9 + runtime/build.rs | 1 + runtime/clippy.toml | 45 + runtime/examples/hello_runtime.rs | 6 +- runtime/fs_util.rs | 25 +- runtime/ops/os/mod.rs | 1 + runtime/ops/os/sys_info.rs | 1 + test_util/src/builders.rs | 1 + test_util/src/temp_dir.rs | 4 + 44 files changed, 2733 insertions(+), 261 deletions(-) create mode 100644 cli/standalone/file_system.rs create mode 100644 cli/standalone/virtual_fs.rs create mode 100644 cli/tests/testdata/compile/npm_fs/main.out create mode 100644 cli/tests/testdata/compile/npm_fs/main.ts rename cli/tools/{standalone.rs => compile.rs} (95%) create mode 100644 runtime/clippy.toml diff --git a/Cargo.lock b/Cargo.lock index 4fb2686277..48351f7a37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2966,9 +2966,9 @@ dependencies = [ [[package]] name = "monch" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1120c1ab92ab8cdacb3b89ac9a214f512d2e78e90e3b57c00d9551ced19f646f" +checksum = "bb73e1dc7d232e1ab47ef27f45fa1d173a0979b370e763a9d0584556011150e0" [[package]] name = "napi-build" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7d0e99d395..b415f53d8e 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -84,7 +84,7 @@ lazy-regex.workspace = true libc.workspace = true log = { workspace = true, features = ["serde"] } lsp-types.workspace = true -monch = "=0.4.1" +monch = "=0.4.2" notify.workspace = true once_cell.workspace = true os_pipe.workspace = true diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 3d88cda913..fa28241a3a 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -527,8 +527,11 @@ impl Flags { .ok() } Task(_) | Check(_) | Coverage(_) | Cache(_) | Info(_) | Eval(_) - | Test(_) | Bench(_) | Repl(_) => 
std::env::current_dir().ok(), - _ => None, + | Test(_) | Bench(_) | Repl(_) | Compile(_) => { + std::env::current_dir().ok() + } + Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_) | Install(_) + | Uninstall(_) | Lsp | Lint(_) | Types | Upgrade(_) | Vendor(_) => None, } } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index b5975536a1..31035fdd04 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -33,6 +33,7 @@ pub use config_file::TsTypeLib; pub use flags::*; pub use lockfile::Lockfile; pub use lockfile::LockfileError; +pub use package_json::PackageJsonDepsProvider; use deno_ast::ModuleSpecifier; use deno_core::anyhow::anyhow; @@ -556,7 +557,7 @@ struct CliOptionOverrides { import_map_specifier: Option>, } -/// Holds the resolved options of many sources used by sub commands +/// Holds the resolved options of many sources used by subcommands /// and provides some helper function for creating common objects. pub struct CliOptions { // the source of the options is a detail the rest of the @@ -1303,6 +1304,16 @@ fn has_flag_env_var(name: &str) -> bool { matches!(value.as_ref().map(|s| s.as_str()), Ok("1")) } +pub fn npm_pkg_req_ref_to_binary_command( + req_ref: &NpmPackageReqReference, +) -> String { + let binary_name = req_ref + .sub_path + .as_deref() + .unwrap_or(req_ref.req.name.as_str()); + binary_name.to_string() +} + #[cfg(test)] mod test { use super::*; diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs index c4d4ce9564..a8c6eaad45 100644 --- a/cli/args/package_json.rs +++ b/cli/args/package_json.rs @@ -28,6 +28,33 @@ pub enum PackageJsonDepValueParseError { pub type PackageJsonDeps = BTreeMap>; +#[derive(Debug, Default)] +pub struct PackageJsonDepsProvider(Option); + +impl PackageJsonDepsProvider { + pub fn new(deps: Option) -> Self { + Self(deps) + } + + pub fn deps(&self) -> Option<&PackageJsonDeps> { + self.0.as_ref() + } + + pub fn reqs(&self) -> Vec<&NpmPackageReq> { + match &self.0 { + Some(deps) => { + let mut package_reqs = deps 
+ .values() + .filter_map(|r| r.as_ref().ok()) + .collect::>(); + package_reqs.sort(); // deterministic resolution + package_reqs + } + None => Vec::new(), + } + } +} + /// Gets an application level package.json's npm package requirements. /// /// Note that this function is not general purpose. It is specifically for diff --git a/cli/factory.rs b/cli/factory.rs index 3bc5ef9e29..a3da400361 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -1,9 +1,11 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use crate::args::npm_pkg_req_ref_to_binary_command; use crate::args::CliOptions; use crate::args::DenoSubcommand; use crate::args::Flags; use crate::args::Lockfile; +use crate::args::PackageJsonDepsProvider; use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; @@ -30,6 +32,7 @@ use crate::npm::NpmCache; use crate::npm::NpmResolution; use crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; +use crate::standalone::DenoCompileBinaryWriter; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; @@ -151,6 +154,7 @@ struct CliFactoryServices { npm_cache: Deferred>, npm_resolver: Deferred>, npm_resolution: Deferred>, + package_json_deps_provider: Deferred>, package_json_deps_installer: Deferred>, text_only_progress_bar: Deferred, type_checker: Deferred>, @@ -301,8 +305,9 @@ impl CliFactory { .npm_resolver .get_or_try_init_async(async { let npm_resolution = self.npm_resolution().await?; + let fs = self.fs().clone(); let npm_fs_resolver = create_npm_fs_resolver( - self.fs().clone(), + fs.clone(), self.npm_cache()?.clone(), self.text_only_progress_bar(), CliNpmRegistryApi::default_url().to_owned(), @@ -310,6 +315,7 @@ impl CliFactory { self.options.node_modules_dir_path(), ); Ok(Arc::new(CliNpmResolver::new( + fs.clone(), npm_resolution.clone(), npm_fs_resolver, self.maybe_lockfile().as_ref().cloned(), @@ 
-318,6 +324,14 @@ impl CliFactory { .await } + pub fn package_json_deps_provider(&self) -> &Arc { + self.services.package_json_deps_provider.get_or_init(|| { + Arc::new(PackageJsonDepsProvider::new( + self.options.maybe_package_json_deps(), + )) + }) + } + pub async fn package_json_deps_installer( &self, ) -> Result<&Arc, AnyError> { @@ -325,12 +339,10 @@ impl CliFactory { .services .package_json_deps_installer .get_or_try_init_async(async { - let npm_api = self.npm_api()?; - let npm_resolution = self.npm_resolution().await?; Ok(Arc::new(PackageJsonDepsInstaller::new( - npm_api.clone(), - npm_resolution.clone(), - self.options.maybe_package_json_deps(), + self.package_json_deps_provider().clone(), + self.npm_api()?.clone(), + self.npm_resolution().await?.clone(), ))) }) .await @@ -365,6 +377,7 @@ impl CliFactory { self.options.no_npm(), self.npm_api()?.clone(), self.npm_resolution().await?.clone(), + self.package_json_deps_provider().clone(), self.package_json_deps_installer().await?.clone(), ))) }) @@ -535,6 +548,21 @@ impl CliFactory { self.services.cjs_resolutions.get_or_init(Default::default) } + pub async fn create_compile_binary_writer( + &self, + ) -> Result { + Ok(DenoCompileBinaryWriter::new( + self.file_fetcher()?, + self.http_client(), + self.deno_dir()?, + self.npm_api()?, + self.npm_cache()?, + self.npm_resolver().await?, + self.npm_resolution().await?, + self.package_json_deps_provider(), + )) + } + /// Gets a function that can be used to create a CliMainWorkerFactory /// for a file watcher. 
pub async fn create_cli_main_worker_factory_func( @@ -572,6 +600,7 @@ impl CliFactory { NpmModuleLoader::new( cjs_resolutions.clone(), node_code_translator.clone(), + fs.clone(), node_resolver.clone(), ), )), @@ -587,6 +616,7 @@ impl CliFactory { &self, ) -> Result { let node_resolver = self.node_resolver().await?; + let fs = self.fs(); Ok(CliMainWorkerFactory::new( StorageKeyResolver::from_options(&self.options), self.npm_resolver().await?.clone(), @@ -603,6 +633,7 @@ impl CliFactory { NpmModuleLoader::new( self.cjs_resolutions().clone(), self.node_code_translator().await?.clone(), + fs.clone(), node_resolver.clone(), ), )), @@ -637,11 +668,8 @@ impl CliFactory { if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) { // if the user ran a binary command, we'll need to set process.argv[0] // to be the name of the binary command instead of deno - let binary_name = pkg_ref - .sub_path - .as_deref() - .unwrap_or(pkg_ref.req.name.as_str()); - maybe_binary_command_name = Some(binary_name.to_string()); + maybe_binary_command_name = + Some(npm_pkg_req_ref_to_binary_command(&pkg_ref)); } } maybe_binary_command_name diff --git a/cli/graph_util.rs b/cli/graph_util.rs index f9dafbb573..53d06071c8 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -345,10 +345,10 @@ pub fn error_for_any_npm_specifier( for module in graph.modules() { match module { Module::Npm(module) => { - bail!("npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier) + bail!("npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier) } Module::Node(module) => { - bail!("Node specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). 
Found: node:{}", module.module_name) + bail!("Node specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name) } Module::Esm(_) | Module::Json(_) | Module::External(_) => {} } diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 3f77eaaa26..b55d3ca206 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -46,6 +46,7 @@ use deno_semver::npm::NpmPackageReqReference; use indexmap::IndexMap; use lsp::Url; use once_cell::sync::Lazy; +use package_json::PackageJsonDepsProvider; use std::collections::BTreeMap; use std::collections::HashMap; use std::collections::HashSet; @@ -1218,10 +1219,12 @@ impl Documents { maybe_jsx_config.as_ref(), maybe_package_json_deps.as_ref(), ); + let deps_provider = + Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps)); let deps_installer = Arc::new(PackageJsonDepsInstaller::new( + deps_provider.clone(), npm_registry_api.clone(), npm_resolution.clone(), - maybe_package_json_deps, )); self.resolver = Arc::new(CliGraphResolver::new( maybe_jsx_config, @@ -1229,6 +1232,7 @@ impl Documents { false, npm_registry_api, npm_resolution, + deps_provider, deps_installer, )); self.imports = Arc::new( diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index d00b8f3138..d32d12ec8b 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -457,8 +457,9 @@ fn create_lsp_structs( )); let resolution = Arc::new(NpmResolution::from_serialized(api.clone(), None, None)); + let fs = Arc::new(deno_fs::RealFs); let fs_resolver = create_npm_fs_resolver( - Arc::new(deno_fs::RealFs), + fs.clone(), npm_cache.clone(), &progress_bar, registry_url.clone(), @@ -468,7 +469,12 @@ fn create_lsp_structs( ( api, npm_cache, - Arc::new(CliNpmResolver::new(resolution.clone(), fs_resolver, None)), + Arc::new(CliNpmResolver::new( + fs, + resolution.clone(), + fs_resolver, + None, + )), resolution, ) } @@ -711,6 +717,7 @@ impl Inner { 
)); let node_fs = Arc::new(deno_fs::RealFs); let npm_resolver = Arc::new(CliNpmResolver::new( + node_fs.clone(), npm_resolution.clone(), create_npm_fs_resolver( node_fs.clone(), diff --git a/cli/main.rs b/cli/main.rs index c3421b0cd9..03e7cf41e7 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -36,7 +36,6 @@ static GLOBAL: Jemalloc = Jemalloc; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use crate::args::Flags; -use crate::resolver::CliGraphResolver; use crate::util::display; use crate::util::v8::get_v8_flags_from_env; use crate::util::v8::init_v8_flags; @@ -97,7 +96,7 @@ async fn run_subcommand(flags: Flags) -> Result { Ok(0) } DenoSubcommand::Compile(compile_flags) => { - tools::standalone::compile(flags, compile_flags).await?; + tools::compile::compile(flags, compile_flags).await?; Ok(0) } DenoSubcommand::Coverage(coverage_flags) => { diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 0ed84a20f6..5465ad1b8a 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -46,11 +46,13 @@ use deno_graph::JsonModule; use deno_graph::Module; use deno_graph::Resolution; use deno_lockfile::Lockfile; +use deno_runtime::deno_fs; use deno_runtime::deno_node; use deno_runtime::deno_node::NodeResolution; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NodeResolver; use deno_runtime::permissions::PermissionsContainer; +use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageReqReference; use std::borrow::Cow; use std::cell::RefCell; @@ -417,13 +419,12 @@ impl CliModuleLoader { } else { &self.root_permissions }; - let code_source = if let Some(code_source) = - self.shared.npm_module_loader.load_sync( - specifier, - maybe_referrer, - permissions, - )? { - code_source + let code_source = if let Some(result) = self + .shared + .npm_module_loader + .load_sync_if_in_npm_package(specifier, maybe_referrer, permissions) + { + result? 
} else { self .shared @@ -494,7 +495,7 @@ impl ModuleLoader for CliModuleLoader { Some(Module::Npm(module)) => self .shared .npm_module_loader - .resolve_npm_module(module, permissions), + .resolve_nv_ref(&module.nv_reference, permissions), Some(Module::Node(module)) => { deno_node::resolve_builtin_node_module(&module.module_name) } @@ -547,7 +548,7 @@ impl ModuleLoader for CliModuleLoader { return self .shared .npm_module_loader - .resolve_for_repl(&reference, permissions); + .resolve_req_reference(&reference, permissions); } } } @@ -652,6 +653,7 @@ impl SourceMapGetter for CliSourceMapGetter { pub struct NpmModuleLoader { cjs_resolutions: Arc, node_code_translator: Arc, + fs: Arc, node_resolver: Arc, } @@ -659,11 +661,13 @@ impl NpmModuleLoader { pub fn new( cjs_resolutions: Arc, node_code_translator: Arc, + fs: Arc, node_resolver: Arc, ) -> Self { Self { cjs_resolutions, node_code_translator, + fs, node_resolver, } } @@ -693,21 +697,21 @@ impl NpmModuleLoader { } } - pub fn resolve_npm_module( + pub fn resolve_nv_ref( &self, - module: &deno_graph::NpmModule, + nv_ref: &NpmPackageNvReference, permissions: &PermissionsContainer, ) -> Result { self .handle_node_resolve_result(self.node_resolver.resolve_npm_reference( - &module.nv_reference, + nv_ref, NodeResolutionMode::Execution, permissions, )) - .with_context(|| format!("Could not resolve '{}'.", module.nv_reference)) + .with_context(|| format!("Could not resolve '{}'.", nv_ref)) } - pub fn resolve_for_repl( + pub fn resolve_req_reference( &self, reference: &NpmPackageReqReference, permissions: &PermissionsContainer, @@ -733,25 +737,39 @@ impl NpmModuleLoader { } } - pub fn load_sync( + pub fn load_sync_if_in_npm_package( &self, specifier: &ModuleSpecifier, maybe_referrer: Option<&ModuleSpecifier>, permissions: &PermissionsContainer, - ) -> Result, AnyError> { - if !self.node_resolver.in_npm_package(specifier) { - return Ok(None); + ) -> Option> { + if self.node_resolver.in_npm_package(specifier) { + 
Some(self.load_sync(specifier, maybe_referrer, permissions)) + } else { + None } + } + + fn load_sync( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + permissions: &PermissionsContainer, + ) -> Result { let file_path = specifier.to_file_path().unwrap(); - let code = std::fs::read_to_string(&file_path).with_context(|| { - let mut msg = "Unable to load ".to_string(); - msg.push_str(&file_path.to_string_lossy()); - if let Some(referrer) = &maybe_referrer { - msg.push_str(" imported from "); - msg.push_str(referrer.as_str()); - } - msg - })?; + let code = self + .fs + .read_to_string(&file_path) + .map_err(AnyError::from) + .with_context(|| { + let mut msg = "Unable to load ".to_string(); + msg.push_str(&file_path.to_string_lossy()); + if let Some(referrer) = &maybe_referrer { + msg.push_str(" imported from "); + msg.push_str(referrer.as_str()); + } + msg + })?; let code = if self.cjs_resolutions.contains(specifier) { // translate cjs to esm if it's cjs and inject node globals @@ -766,11 +784,11 @@ impl NpmModuleLoader { .node_code_translator .esm_code_with_node_globals(specifier, &code)? 
}; - Ok(Some(ModuleCodeSource { + Ok(ModuleCodeSource { code: code.into(), found_url: specifier.clone(), media_type: MediaType::from_specifier(specifier), - })) + }) } fn handle_node_resolve_result( diff --git a/cli/npm/installer.rs b/cli/npm/installer.rs index bdcafb5420..43f79d8f06 100644 --- a/cli/npm/installer.rs +++ b/cli/npm/installer.rs @@ -10,7 +10,7 @@ use deno_npm::registry::NpmRegistryApi; use deno_npm::registry::NpmRegistryPackageInfoLoadError; use deno_semver::npm::NpmPackageReq; -use crate::args::package_json::PackageJsonDeps; +use crate::args::PackageJsonDepsProvider; use crate::util::sync::AtomicFlag; use super::CliNpmRegistryApi; @@ -18,23 +18,13 @@ use super::NpmResolution; #[derive(Debug)] struct PackageJsonDepsInstallerInner { + deps_provider: Arc, has_installed_flag: AtomicFlag, npm_registry_api: Arc, npm_resolution: Arc, - package_deps: PackageJsonDeps, } impl PackageJsonDepsInstallerInner { - pub fn reqs(&self) -> Vec<&NpmPackageReq> { - let mut package_reqs = self - .package_deps - .values() - .filter_map(|r| r.as_ref().ok()) - .collect::>(); - package_reqs.sort(); // deterministic resolution - package_reqs - } - pub fn reqs_with_info_futures( &self, ) -> FuturesOrdered< @@ -45,7 +35,7 @@ impl PackageJsonDepsInstallerInner { >, >, > { - let package_reqs = self.reqs(); + let package_reqs = self.deps_provider.reqs(); FuturesOrdered::from_iter(package_reqs.into_iter().map(|req| { let api = self.npm_registry_api.clone(); @@ -63,22 +53,18 @@ pub struct PackageJsonDepsInstaller(Option); impl PackageJsonDepsInstaller { pub fn new( + deps_provider: Arc, npm_registry_api: Arc, npm_resolution: Arc, - deps: Option, ) -> Self { - Self(deps.map(|package_deps| PackageJsonDepsInstallerInner { + Self(Some(PackageJsonDepsInstallerInner { + deps_provider, has_installed_flag: Default::default(), npm_registry_api, npm_resolution, - package_deps, })) } - pub fn package_deps(&self) -> Option<&PackageJsonDeps> { - self.0.as_ref().map(|inner| &inner.package_deps) - 
} - /// Installs the top level dependencies in the package.json file /// without going through and resolving the descendant dependencies yet. pub async fn ensure_top_level_install(&self) -> Result<(), AnyError> { diff --git a/cli/npm/resolution.rs b/cli/npm/resolution.rs index 1b191b2455..edc7ec6475 100644 --- a/cli/npm/resolution.rs +++ b/cli/npm/resolution.rs @@ -237,6 +237,10 @@ impl NpmResolution { Ok(nv) } + pub fn all_packages(&self) -> Vec { + self.snapshot.read().all_packages() + } + pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned { self.snapshot.read().all_packages_partitioned() } diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs index ccba00d435..fc040a7ccb 100644 --- a/cli/npm/resolvers/common.rs +++ b/cli/npm/resolvers/common.rs @@ -12,6 +12,7 @@ use deno_core::futures; use deno_core::url::Url; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; @@ -90,6 +91,7 @@ pub async fn cache_packages( } pub fn ensure_registry_read_permission( + fs: &Arc, permissions: &dyn NodePermissions, registry_path: &Path, path: &Path, @@ -101,8 +103,8 @@ pub fn ensure_registry_read_permission( .all(|c| !matches!(c, std::path::Component::ParentDir)) { // todo(dsherret): cache this? 
- if let Ok(registry_path) = std::fs::canonicalize(registry_path) { - match std::fs::canonicalize(path) { + if let Ok(registry_path) = fs.realpath_sync(registry_path) { + match fs.realpath_sync(path) { Ok(path) if path.starts_with(registry_path) => { return Ok(()); } diff --git a/cli/npm/resolvers/global.rs b/cli/npm/resolvers/global.rs index 79df161875..fe8764b0cf 100644 --- a/cli/npm/resolvers/global.rs +++ b/cli/npm/resolvers/global.rs @@ -14,6 +14,7 @@ use deno_npm::resolution::PackageNotFoundFromReferrerError; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; @@ -28,6 +29,7 @@ use super::common::NpmPackageFsResolver; /// Resolves packages from the global npm cache. #[derive(Debug)] pub struct GlobalNpmPackageResolver { + fs: Arc, cache: Arc, resolution: Arc, registry_url: Url, @@ -35,11 +37,13 @@ pub struct GlobalNpmPackageResolver { impl GlobalNpmPackageResolver { pub fn new( + fs: Arc, cache: Arc, registry_url: Url, resolution: Arc, ) -> Self { Self { + fs, cache, resolution, registry_url, @@ -130,7 +134,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { path: &Path, ) -> Result<(), AnyError> { let registry_path = self.cache.registry_folder(&self.registry_url); - ensure_registry_read_permission(permissions, ®istry_path, path) + ensure_registry_read_permission(&self.fs, permissions, ®istry_path, path) } } diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index 038d9eea1f..cd1dc36715 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -154,7 +154,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { loop { current_folder = get_next_node_modules_ancestor(current_folder); let sub_dir = join_package_name(current_folder, name); - if sub_dir.is_dir() { + if self.fs.is_dir(&sub_dir) { // if doing types resolution, only 
resolve the package if it specifies a types property if mode.is_types() && !name.starts_with("@types/") { let package_json = PackageJson::load_skip_read_permission( @@ -173,7 +173,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { if mode.is_types() && !name.starts_with("@types/") { let sub_dir = join_package_name(current_folder, &types_package_name(name)); - if sub_dir.is_dir() { + if self.fs.is_dir(&sub_dir) { return Ok(sub_dir); } } @@ -214,6 +214,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { path: &Path, ) -> Result<(), AnyError> { ensure_registry_read_permission( + &self.fs, permissions, &self.root_node_modules_path, path, diff --git a/cli/npm/resolvers/mod.rs b/cli/npm/resolvers/mod.rs index 86d3840f31..f54e509f0e 100644 --- a/cli/npm/resolvers/mod.rs +++ b/cli/npm/resolvers/mod.rs @@ -18,7 +18,7 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::resolution::PackageReqNotFoundError; use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_npm::NpmPackageId; -use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::NpmResolver; @@ -32,7 +32,7 @@ use serde::Deserialize; use serde::Serialize; use crate::args::Lockfile; -use crate::util::fs::canonicalize_path_maybe_not_exists; +use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::progress_bar::ProgressBar; use self::common::NpmPackageFsResolver; @@ -49,6 +49,7 @@ pub struct NpmProcessState { /// Brings together the npm resolution with the file system. 
pub struct CliNpmResolver { + fs: Arc, fs_resolver: Arc, resolution: Arc, maybe_lockfile: Option>>, @@ -57,6 +58,7 @@ pub struct CliNpmResolver { impl std::fmt::Debug for CliNpmResolver { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("NpmPackageResolver") + .field("fs", &"") .field("fs_resolver", &"") .field("resolution", &"") .field("maybe_lockfile", &"") @@ -66,11 +68,13 @@ impl std::fmt::Debug for CliNpmResolver { impl CliNpmResolver { pub fn new( + fs: Arc, resolution: Arc, fs_resolver: Arc, maybe_lockfile: Option>>, ) -> Self { Self { + fs, fs_resolver, resolution, maybe_lockfile, @@ -81,6 +85,10 @@ impl CliNpmResolver { self.fs_resolver.root_dir_url() } + pub fn node_modules_path(&self) -> Option { + self.fs_resolver.node_modules_path() + } + pub fn resolve_pkg_id_from_pkg_req( &self, req: &NpmPackageReq, @@ -88,12 +96,17 @@ impl CliNpmResolver { self.resolution.resolve_pkg_id_from_pkg_req(req) } - fn resolve_pkg_folder_from_deno_module_at_pkg_id( + pub fn resolve_pkg_folder_from_pkg_id( &self, pkg_id: &NpmPackageId, ) -> Result { let path = self.fs_resolver.package_folder(pkg_id)?; - let path = canonicalize_path_maybe_not_exists(&path)?; + let path = canonicalize_path_maybe_not_exists_with_fs(&path, |path| { + self + .fs + .realpath_sync(path) + .map_err(|err| err.into_io_error()) + })?; log::debug!( "Resolved package folder of {} to {}", pkg_id.as_serialized(), @@ -237,7 +250,7 @@ impl NpmResolver for CliNpmResolver { pkg_nv: &NpmPackageNv, ) -> Result { let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(pkg_nv)?; - self.resolve_pkg_folder_from_deno_module_at_pkg_id(&pkg_id) + self.resolve_pkg_folder_from_pkg_id(&pkg_id) } fn resolve_pkg_id_from_pkg_req( @@ -270,7 +283,7 @@ impl NpmResolver for CliNpmResolver { } pub fn create_npm_fs_resolver( - fs: Arc, + fs: Arc, cache: Arc, progress_bar: &ProgressBar, registry_url: Url, @@ -287,6 +300,7 @@ pub fn create_npm_fs_resolver( resolution, )), None => 
Arc::new(GlobalNpmPackageResolver::new( + fs, cache, registry_url, resolution, diff --git a/cli/resolver.rs b/cli/resolver.rs index 113a06fe79..7f49116f46 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -20,16 +20,86 @@ use std::sync::Arc; use crate::args::package_json::PackageJsonDeps; use crate::args::JsxImportSourceConfig; +use crate::args::PackageJsonDepsProvider; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; use crate::npm::PackageJsonDepsInstaller; use crate::util::sync::AtomicFlag; +/// Result of checking if a specifier is mapped via +/// an import map or package.json. +pub enum MappedResolution { + None, + PackageJson(ModuleSpecifier), + ImportMap(ModuleSpecifier), +} + +impl MappedResolution { + pub fn into_specifier(self) -> Option { + match self { + MappedResolution::None => Option::None, + MappedResolution::PackageJson(specifier) => Some(specifier), + MappedResolution::ImportMap(specifier) => Some(specifier), + } + } +} + +/// Resolver for specifiers that could be mapped via an +/// import map or package.json. +#[derive(Debug)] +pub struct MappedSpecifierResolver { + maybe_import_map: Option>, + package_json_deps_provider: Arc, +} + +impl MappedSpecifierResolver { + pub fn new( + maybe_import_map: Option>, + package_json_deps_provider: Arc, + ) -> Self { + Self { + maybe_import_map, + package_json_deps_provider, + } + } + + pub fn resolve( + &self, + specifier: &str, + referrer: &ModuleSpecifier, + ) -> Result { + // attempt to resolve with the import map first + let maybe_import_map_err = match self + .maybe_import_map + .as_ref() + .map(|import_map| import_map.resolve(specifier, referrer)) + { + Some(Ok(value)) => return Ok(MappedResolution::ImportMap(value)), + Some(Err(err)) => Some(err), + None => None, + }; + + // then with package.json + if let Some(deps) = self.package_json_deps_provider.deps() { + if let Some(specifier) = resolve_package_json_dep(specifier, deps)? 
{ + return Ok(MappedResolution::PackageJson(specifier)); + } + } + + // otherwise, surface the import map error or try resolving when has no import map + if let Some(err) = maybe_import_map_err { + Err(err.into()) + } else { + Ok(MappedResolution::None) + } + } +} + /// A resolver that takes care of resolution, taking into account loaded /// import map, JSX settings. #[derive(Debug)] pub struct CliGraphResolver { - maybe_import_map: Option>, + mapped_specifier_resolver: MappedSpecifierResolver, maybe_default_jsx_import_source: Option, maybe_jsx_import_source_module: Option, no_npm: bool, @@ -51,7 +121,10 @@ impl Default for CliGraphResolver { None, )); Self { - maybe_import_map: Default::default(), + mapped_specifier_resolver: MappedSpecifierResolver { + maybe_import_map: Default::default(), + package_json_deps_provider: Default::default(), + }, maybe_default_jsx_import_source: Default::default(), maybe_jsx_import_source_module: Default::default(), no_npm: false, @@ -71,10 +144,14 @@ impl CliGraphResolver { no_npm: bool, npm_registry_api: Arc, npm_resolution: Arc, + package_json_deps_provider: Arc, package_json_deps_installer: Arc, ) -> Self { Self { - maybe_import_map, + mapped_specifier_resolver: MappedSpecifierResolver { + maybe_import_map, + package_json_deps_provider, + }, maybe_default_jsx_import_source: maybe_jsx_import_source_config .as_ref() .and_then(|c| c.default_specifier.clone()), @@ -135,31 +212,20 @@ impl Resolver for CliGraphResolver { specifier: &str, referrer: &ModuleSpecifier, ) -> Result { - // attempt to resolve with the import map first - let maybe_import_map_err = match self - .maybe_import_map - .as_ref() - .map(|import_map| import_map.resolve(specifier, referrer)) + use MappedResolution::*; + match self + .mapped_specifier_resolver + .resolve(specifier, referrer)? 
{ - Some(Ok(value)) => return Ok(value), - Some(Err(err)) => Some(err), - None => None, - }; - - // then with package.json - if let Some(deps) = self.package_json_deps_installer.package_deps().as_ref() - { - if let Some(specifier) = resolve_package_json_dep(specifier, deps)? { + ImportMap(specifier) => Ok(specifier), + PackageJson(specifier) => { + // found a specifier in the package.json, so mark that + // we need to do an "npm install" later self.found_package_json_dep_flag.raise(); - return Ok(specifier); + Ok(specifier) } - } - - // otherwise, surface the import map error or try resolving when has no import map - if let Some(err) = maybe_import_map_err { - Err(err.into()) - } else { - deno_graph::resolve_import(specifier, referrer).map_err(|err| err.into()) + None => deno_graph::resolve_import(specifier, referrer) + .map_err(|err| err.into()), } } } diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 51d8db79e1..9ccb39e544 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -1,10 +1,13 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use std::collections::BTreeMap; +use std::env::current_exe; use std::io::Read; use std::io::Seek; use std::io::SeekFrom; use std::io::Write; use std::path::Path; +use std::path::PathBuf; use deno_ast::ModuleSpecifier; use deno_core::anyhow::Context; @@ -14,22 +17,112 @@ use deno_core::futures::AsyncReadExt; use deno_core::futures::AsyncSeekExt; use deno_core::serde_json; use deno_core::url::Url; +use deno_npm::registry::PackageDepNpmSchemeValueParseError; +use deno_npm::resolution::SerializedNpmResolutionSnapshot; use deno_runtime::permissions::PermissionsOptions; +use deno_semver::npm::NpmPackageReq; +use deno_semver::npm::NpmVersionReqSpecifierParseError; use log::Level; use serde::Deserialize; use serde::Serialize; +use crate::args::package_json::PackageJsonDepValueParseError; +use crate::args::package_json::PackageJsonDeps; use crate::args::CaData; use crate::args::CliOptions; use crate::args::CompileFlags; +use crate::args::PackageJsonDepsProvider; use crate::cache::DenoDir; use crate::file_fetcher::FileFetcher; use crate::http_util::HttpClient; +use crate::npm::CliNpmRegistryApi; +use crate::npm::CliNpmResolver; +use crate::npm::NpmCache; +use crate::npm::NpmResolution; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +use super::virtual_fs::FileBackedVfs; +use super::virtual_fs::VfsBuilder; +use super::virtual_fs::VfsRoot; +use super::virtual_fs::VirtualDirectory; + const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; +#[derive(Serialize, Deserialize)] +enum SerializablePackageJsonDepValueParseError { + SchemeValue(String), + Specifier(String), + Unsupported { scheme: String }, +} + +impl SerializablePackageJsonDepValueParseError { + pub fn from_err(err: PackageJsonDepValueParseError) -> Self { + match err { + PackageJsonDepValueParseError::SchemeValue(err) => { + Self::SchemeValue(err.value) + } + PackageJsonDepValueParseError::Specifier(err) => { + Self::Specifier(err.source.to_string()) + } + 
PackageJsonDepValueParseError::Unsupported { scheme } => { + Self::Unsupported { scheme } + } + } + } + + pub fn into_err(self) -> PackageJsonDepValueParseError { + match self { + SerializablePackageJsonDepValueParseError::SchemeValue(value) => { + PackageJsonDepValueParseError::SchemeValue( + PackageDepNpmSchemeValueParseError { value }, + ) + } + SerializablePackageJsonDepValueParseError::Specifier(source) => { + PackageJsonDepValueParseError::Specifier( + NpmVersionReqSpecifierParseError { + source: monch::ParseErrorFailureError::new(source), + }, + ) + } + SerializablePackageJsonDepValueParseError::Unsupported { scheme } => { + PackageJsonDepValueParseError::Unsupported { scheme } + } + } + } +} + +#[derive(Serialize, Deserialize)] +pub struct SerializablePackageJsonDeps( + BTreeMap< + String, + Result, + >, +); + +impl SerializablePackageJsonDeps { + pub fn from_deps(deps: PackageJsonDeps) -> Self { + Self( + deps + .into_iter() + .map(|(name, req)| { + let res = + req.map_err(SerializablePackageJsonDepValueParseError::from_err); + (name, res) + }) + .collect(), + ) + } + + pub fn into_deps(self) -> PackageJsonDeps { + self + .0 + .into_iter() + .map(|(name, res)| (name, res.map_err(|err| err.into_err()))) + .collect() + } +} + #[derive(Deserialize, Serialize)] pub struct Metadata { pub argv: Vec, @@ -44,27 +137,74 @@ pub struct Metadata { pub unsafely_ignore_certificate_errors: Option>, pub maybe_import_map: Option<(Url, String)>, pub entrypoint: ModuleSpecifier, + /// Whether this uses a node_modules directory (true) or the global cache (false). 
+ pub node_modules_dir: bool, + pub npm_snapshot: Option, + pub package_json_deps: Option, } -pub fn write_binary_bytes( +pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result { + let file_path = current_exe().unwrap(); + let mut file = std::fs::File::open(file_path)?; + file.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))?; + let mut trailer = [0; TRAILER_SIZE]; + file.read_exact(&mut trailer)?; + let trailer = Trailer::parse(&trailer)?.unwrap(); + file.seek(SeekFrom::Start(trailer.npm_vfs_pos))?; + let mut vfs_data = vec![0; trailer.npm_vfs_len() as usize]; + file.read_exact(&mut vfs_data)?; + let mut dir: VirtualDirectory = serde_json::from_slice(&vfs_data)?; + + // align the name of the directory with the root dir + dir.name = root_dir_path + .file_name() + .unwrap() + .to_string_lossy() + .to_string(); + + let fs_root = VfsRoot { + dir, + root_path: root_dir_path, + start_file_offset: trailer.npm_files_pos, + }; + Ok(FileBackedVfs::new(file, fs_root)) +} + +fn write_binary_bytes( writer: &mut impl Write, original_bin: Vec, metadata: &Metadata, eszip: eszip::EszipV2, + npm_vfs: Option<&VirtualDirectory>, + npm_files: &Vec>, ) -> Result<(), AnyError> { let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); + let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec(); let eszip_archive = eszip.into_bytes(); - let eszip_pos = original_bin.len(); - let metadata_pos = eszip_pos + eszip_archive.len(); - let mut trailer = MAGIC_TRAILER.to_vec(); - trailer.write_all(&eszip_pos.to_be_bytes())?; - trailer.write_all(&metadata_pos.to_be_bytes())?; - writer.write_all(&original_bin)?; writer.write_all(&eszip_archive)?; writer.write_all(&metadata)?; - writer.write_all(&trailer)?; + writer.write_all(&npm_vfs)?; + for file in npm_files { + writer.write_all(file)?; + } + + // write the trailer, which includes the positions + // of the data blocks in the file + writer.write_all(&{ + let eszip_pos = original_bin.len() as u64; + let metadata_pos = eszip_pos + 
(eszip_archive.len() as u64); + let npm_vfs_pos = metadata_pos + (metadata.len() as u64); + let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64); + Trailer { + eszip_pos, + metadata_pos, + npm_vfs_pos, + npm_files_pos, + } + .as_bytes() + })?; Ok(()) } @@ -73,12 +213,15 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { let Ok(mut output_file) = std::fs::File::open(exe_path) else { return false; }; - if output_file.seek(SeekFrom::End(-24)).is_err() { + if output_file + .seek(SeekFrom::End(-(TRAILER_SIZE as i64))) + .is_err() + { // This seek may fail because the file is too small to possibly be // `deno compile` output. return false; } - let mut trailer = [0; 24]; + let mut trailer = [0; TRAILER_SIZE]; if output_file.read_exact(&mut trailer).is_err() { return false; }; @@ -88,13 +231,9 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { /// This function will try to run this binary as a standalone binary /// produced by `deno compile`. It determines if this is a standalone -/// binary by checking for the magic trailer string `d3n0l4nd` at EOF-24 (8 bytes * 3). -/// The magic trailer is followed by: -/// - a u64 pointer to the JS bundle embedded in the binary -/// - a u64 pointer to JSON metadata (serialized flags) embedded in the binary -/// These are dereferenced, and the bundle is executed under the configuration -/// specified by the metadata. If no magic trailer is present, this function -/// exits with `Ok(None)`. +/// binary by skipping over the trailer width at the end of the file, +/// then checking for the magic trailer string `d3n0l4nd`. If found, +/// the bundle is executed. If not, this function exits with `Ok(None)`. 
pub async fn extract_standalone( exe_path: &Path, cli_args: Vec, @@ -104,21 +243,17 @@ pub async fn extract_standalone( let mut bufreader = deno_core::futures::io::BufReader::new(AllowStdIo::new(file)); - let trailer_pos = bufreader.seek(SeekFrom::End(-24)).await?; - let mut trailer = [0; 24]; + let _trailer_pos = bufreader + .seek(SeekFrom::End(-(TRAILER_SIZE as i64))) + .await?; + let mut trailer = [0; TRAILER_SIZE]; bufreader.read_exact(&mut trailer).await?; - let (magic_trailer, rest) = trailer.split_at(8); - if magic_trailer != MAGIC_TRAILER { - return Ok(None); - } + let trailer = match Trailer::parse(&trailer)? { + None => return Ok(None), + Some(trailer) => trailer, + }; - let (eszip_archive_pos, rest) = rest.split_at(8); - let metadata_pos = rest; - let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; - let metadata_pos = u64_from_bytes(metadata_pos)?; - let metadata_len = trailer_pos - metadata_pos; - - bufreader.seek(SeekFrom::Start(eszip_archive_pos)).await?; + bufreader.seek(SeekFrom::Start(trailer.eszip_pos)).await?; let (eszip, loader) = eszip::EszipV2::parse(bufreader) .await @@ -126,12 +261,14 @@ pub async fn extract_standalone( let mut bufreader = loader.await.context("Failed to parse eszip archive")?; - bufreader.seek(SeekFrom::Start(metadata_pos)).await?; + bufreader + .seek(SeekFrom::Start(trailer.metadata_pos)) + .await?; let mut metadata = String::new(); bufreader - .take(metadata_len) + .take(trailer.metadata_len()) .read_to_string(&mut metadata) .await .context("Failed to read metadata from the current executable")?; @@ -142,6 +279,57 @@ pub async fn extract_standalone( Ok(Some((metadata, eszip))) } +const TRAILER_SIZE: usize = std::mem::size_of::() + 8; // 8 bytes for the magic trailer string + +struct Trailer { + eszip_pos: u64, + metadata_pos: u64, + npm_vfs_pos: u64, + npm_files_pos: u64, +} + +impl Trailer { + pub fn parse(trailer: &[u8]) -> Result, AnyError> { + let (magic_trailer, rest) = trailer.split_at(8); + if 
magic_trailer != MAGIC_TRAILER { + return Ok(None); + } + + let (eszip_archive_pos, rest) = rest.split_at(8); + let (metadata_pos, rest) = rest.split_at(8); + let (npm_vfs_pos, npm_files_pos) = rest.split_at(8); + let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; + let metadata_pos = u64_from_bytes(metadata_pos)?; + let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?; + let npm_files_pos = u64_from_bytes(npm_files_pos)?; + Ok(Some(Trailer { + eszip_pos: eszip_archive_pos, + metadata_pos, + npm_vfs_pos, + npm_files_pos, + })) + } + + pub fn metadata_len(&self) -> u64 { + self.npm_vfs_pos - self.metadata_pos + } + + pub fn npm_vfs_len(&self) -> u64 { + self.npm_files_pos - self.npm_vfs_pos + } + + pub fn as_bytes(&self) -> Vec { + let mut trailer = MAGIC_TRAILER.to_vec(); + trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap(); + trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap(); + trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap(); + trailer + .write_all(&self.npm_files_pos.to_be_bytes()) + .unwrap(); + trailer + } +} + fn u64_from_bytes(arr: &[u8]) -> Result { let fixed_arr: &[u8; 8] = arr .try_into() @@ -153,18 +341,34 @@ pub struct DenoCompileBinaryWriter<'a> { file_fetcher: &'a FileFetcher, client: &'a HttpClient, deno_dir: &'a DenoDir, + npm_api: &'a CliNpmRegistryApi, + npm_cache: &'a NpmCache, + npm_resolver: &'a CliNpmResolver, + resolution: &'a NpmResolution, + package_json_deps_provider: &'a PackageJsonDepsProvider, } impl<'a> DenoCompileBinaryWriter<'a> { + #[allow(clippy::too_many_arguments)] pub fn new( file_fetcher: &'a FileFetcher, client: &'a HttpClient, deno_dir: &'a DenoDir, + npm_api: &'a CliNpmRegistryApi, + npm_cache: &'a NpmCache, + npm_resolver: &'a CliNpmResolver, + resolution: &'a NpmResolution, + package_json_deps_provider: &'a PackageJsonDepsProvider, ) -> Self { Self { file_fetcher, client, deno_dir, + npm_api, + npm_cache, + npm_resolver, + resolution, + package_json_deps_provider, } } @@ -284,6 +488,14 @@ 
impl<'a> DenoCompileBinaryWriter<'a> { .resolve_import_map(self.file_fetcher) .await? .map(|import_map| (import_map.base_url().clone(), import_map.to_json())); + let (npm_snapshot, npm_vfs, npm_files) = if self.resolution.has_packages() { + let (root_dir, files) = self.build_vfs()?.into_dir_and_files(); + let snapshot = self.resolution.serialized_snapshot(); + (Some(snapshot), Some(root_dir), files) + } else { + (None, None, Vec::new()) + }; + let metadata = Metadata { argv: compile_flags.args.clone(), unstable: cli_options.unstable(), @@ -299,8 +511,44 @@ impl<'a> DenoCompileBinaryWriter<'a> { ca_data, entrypoint: entrypoint.clone(), maybe_import_map, + node_modules_dir: self.npm_resolver.node_modules_path().is_some(), + npm_snapshot, + package_json_deps: self + .package_json_deps_provider + .deps() + .map(|deps| SerializablePackageJsonDeps::from_deps(deps.clone())), }; - write_binary_bytes(writer, original_bin, &metadata, eszip) + write_binary_bytes( + writer, + original_bin, + &metadata, + eszip, + npm_vfs.as_ref(), + &npm_files, + ) + } + + fn build_vfs(&self) -> Result { + if let Some(node_modules_path) = self.npm_resolver.node_modules_path() { + let mut builder = VfsBuilder::new(node_modules_path.clone()); + builder.add_dir_recursive(&node_modules_path)?; + Ok(builder) + } else { + // DO NOT include the user's registry url as it may contain credentials, + // but also don't make this dependent on the registry url + let registry_url = self.npm_api.base_url(); + let root_path = self.npm_cache.registry_folder(registry_url); + let mut builder = VfsBuilder::new(root_path); + for package in self.resolution.all_packages() { + let folder = self + .npm_resolver + .resolve_pkg_folder_from_pkg_id(&package.pkg_id)?; + builder.add_dir_recursive(&folder)?; + } + // overwrite the root directory's name to obscure the user's registry url + builder.set_root_dir_name("node_modules".to_string()); + Ok(builder) + } } } diff --git a/cli/standalone/file_system.rs 
// ---- cli/standalone/file_system.rs (new file added by this patch) ----
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

// `FileSystem` implementation used inside a `deno compile` binary.
// Paths that fall inside the embedded, read-only virtual filesystem (the
// bundled npm packages) are answered from the in-binary VFS; every other
// path is delegated to `RealFs`. Any operation that would mutate a VFS
// path fails with `FsError::NotSupported`.
//
// NOTE(review): generic parameters below (e.g. `Arc<FileBackedVfs>`,
// `FsResult<u32>`) were stripped by the patch extraction and have been
// restored from the `FileSystem` trait's signatures — confirm against the
// upstream file.

use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::FsDirEntry;
use deno_runtime::deno_fs::FsFileType;
use deno_runtime::deno_fs::OpenOptions;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::File;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_io::fs::FsResult;
use deno_runtime::deno_io::fs::FsStat;

use super::virtual_fs::FileBackedVfs;

// Thin shared handle around the file-backed VFS; cloning is cheap
// (refcount bump) which `copy_file_async` relies on.
#[derive(Debug, Clone)]
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);

impl DenoCompileFileSystem {
  pub fn new(vfs: FileBackedVfs) -> Self {
    Self(Arc::new(vfs))
  }

  // Guard used by every mutating operation: the embedded VFS is read-only.
  fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
    if self.0.is_path_within(path) {
      Err(FsError::NotSupported)
    } else {
      Ok(())
    }
  }

  // Copies a file out of the VFS onto the real filesystem by reading the
  // whole embedded file and rewriting it at `newpath` (truncating).
  fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    let old_file = self.0.file_entry(oldpath)?;
    let old_file_bytes = self.0.read_file_all(old_file)?;
    RealFs.write_file_sync(
      newpath,
      OpenOptions {
        read: false,
        write: true,
        create: true,
        truncate: true,
        append: false,
        create_new: false,
        mode: None,
      },
      &old_file_bytes,
    )
  }
}

// Read operations branch on `is_path_within`; write operations call
// `error_if_in_vfs` first and then delegate to `RealFs`.
#[async_trait::async_trait(?Send)]
impl FileSystem for DenoCompileFileSystem {
  fn cwd(&self) -> FsResult<PathBuf> {
    RealFs.cwd()
  }

  fn tmp_dir(&self) -> FsResult<PathBuf> {
    RealFs.tmp_dir()
  }

  fn chdir(&self, path: &Path) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chdir(path)
  }

  fn umask(&self, mask: Option<u32>) -> FsResult<u32> {
    RealFs.umask(mask)
  }

  fn open_sync(
    &self,
    path: &Path,
    options: OpenOptions,
  ) -> FsResult<Rc<dyn File>> {
    // Note: `options` (e.g. a write flag) is ignored for VFS paths; the
    // returned VFS file rejects writes itself with `NotSupported`.
    if self.0.is_path_within(path) {
      Ok(self.0.open_file(path)?)
    } else {
      RealFs.open_sync(path, options)
    }
  }
  async fn open_async(
    &self,
    path: PathBuf,
    options: OpenOptions,
  ) -> FsResult<Rc<dyn File>> {
    if self.0.is_path_within(&path) {
      Ok(self.0.open_file(&path)?)
    } else {
      RealFs.open_async(path, options).await
    }
  }

  fn mkdir_sync(
    &self,
    path: &Path,
    recursive: bool,
    mode: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.mkdir_sync(path, recursive, mode)
  }
  async fn mkdir_async(
    &self,
    path: PathBuf,
    recursive: bool,
    mode: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.mkdir_async(path, recursive, mode).await
  }

  fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chmod_sync(path, mode)
  }
  async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.chmod_async(path, mode).await
  }

  fn chown_sync(
    &self,
    path: &Path,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.chown_sync(path, uid, gid)
  }
  async fn chown_async(
    &self,
    path: PathBuf,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.chown_async(path, uid, gid).await
  }

  fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.remove_sync(path, recursive)
  }
  async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.remove_async(path, recursive).await
  }

  // Copying *out of* the VFS is allowed (source is only read); the
  // destination must be a real path.
  fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(newpath)?;
    if self.0.is_path_within(oldpath) {
      self.copy_to_real_path(oldpath, newpath)
    } else {
      RealFs.copy_file_sync(oldpath, newpath)
    }
  }
  async fn copy_file_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&newpath)?;
    if self.0.is_path_within(&oldpath) {
      // the VFS read + real write is blocking I/O, so run it off-loop
      let fs = self.clone();
      tokio::task::spawn_blocking(move || {
        fs.copy_to_real_path(&oldpath, &newpath)
      })
      .await?
    } else {
      RealFs.copy_file_async(oldpath, newpath).await
    }
  }

  fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
    if self.0.is_path_within(path) {
      Ok(self.0.stat(path)?)
    } else {
      RealFs.stat_sync(path)
    }
  }
  async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    if self.0.is_path_within(&path) {
      Ok(self.0.stat(&path)?)
    } else {
      RealFs.stat_async(path).await
    }
  }

  fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
    if self.0.is_path_within(path) {
      Ok(self.0.lstat(path)?)
    } else {
      RealFs.lstat_sync(path)
    }
  }
  async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    if self.0.is_path_within(&path) {
      Ok(self.0.lstat(&path)?)
    } else {
      RealFs.lstat_async(path).await
    }
  }

  fn realpath_sync(&self, path: &Path) -> FsResult<PathBuf> {
    if self.0.is_path_within(path) {
      Ok(self.0.canonicalize(path)?)
    } else {
      RealFs.realpath_sync(path)
    }
  }
  async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    if self.0.is_path_within(&path) {
      Ok(self.0.canonicalize(&path)?)
    } else {
      RealFs.realpath_async(path).await
    }
  }

  fn read_dir_sync(&self, path: &Path) -> FsResult<Vec<FsDirEntry>> {
    if self.0.is_path_within(path) {
      Ok(self.0.read_dir(path)?)
    } else {
      RealFs.read_dir_sync(path)
    }
  }
  async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
    if self.0.is_path_within(&path) {
      Ok(self.0.read_dir(&path)?)
    } else {
      RealFs.read_dir_async(path).await
    }
  }

  fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.rename_sync(oldpath, newpath)
  }
  async fn rename_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.rename_async(oldpath, newpath).await
  }

  fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.link_sync(oldpath, newpath)
  }
  async fn link_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.link_async(oldpath, newpath).await
  }

  fn symlink_sync(
    &self,
    oldpath: &Path,
    newpath: &Path,
    file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(oldpath)?;
    self.error_if_in_vfs(newpath)?;
    RealFs.symlink_sync(oldpath, newpath, file_type)
  }
  async fn symlink_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
    file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&oldpath)?;
    self.error_if_in_vfs(&newpath)?;
    RealFs.symlink_async(oldpath, newpath, file_type).await
  }

  fn read_link_sync(&self, path: &Path) -> FsResult<PathBuf> {
    if self.0.is_path_within(path) {
      Ok(self.0.read_link(path)?)
    } else {
      RealFs.read_link_sync(path)
    }
  }
  async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    if self.0.is_path_within(&path) {
      Ok(self.0.read_link(&path)?)
    } else {
      RealFs.read_link_async(path).await
    }
  }

  fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.truncate_sync(path, len)
  }
  async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs.truncate_async(path, len).await
  }

  fn utime_sync(
    &self,
    path: &Path,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(path)?;
    RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
  }
  async fn utime_async(
    &self,
    path: PathBuf,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.error_if_in_vfs(&path)?;
    RealFs
      .utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
      .await
  }
}

// ---- cli/standalone/mod.rs (diff hunks continue on the following lines) ----
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use crate::args::get_root_cert_store; +use crate::args::npm_pkg_req_ref_to_binary_command; use crate::args::CaData; use crate::args::CacheSetting; +use crate::args::PackageJsonDepsProvider; use crate::args::StorageKeyResolver; +use crate::cache::Caches; use crate::cache::DenoDir; +use crate::cache::NodeAnalysisCache; use crate::file_fetcher::get_source_from_data_url; use crate::http_util::HttpClient; +use crate::module_loader::CjsResolutionStore; +use crate::module_loader::NpmModuleLoader; +use crate::node::CliCjsEsmCodeAnalyzer; use crate::npm::create_npm_fs_resolver; use crate::npm::CliNpmRegistryApi; use crate::npm::CliNpmResolver; use crate::npm::NpmCache; use crate::npm::NpmResolution; +use crate::resolver::MappedSpecifierResolver; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; @@ -19,7 +27,7 @@ use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; use crate::worker::HasNodeSpecifierChecker; use crate::worker::ModuleLoaderFactory; -use crate::CliGraphResolver; +use deno_ast::MediaType; use deno_core::anyhow::Context; use deno_core::error::type_error; use deno_core::error::AnyError; @@ -29,31 +37,44 @@ use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::ResolutionKind; -use deno_graph::source::Resolver; use deno_runtime::deno_fs; +use deno_runtime::deno_node; +use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; +use deno_semver::npm::NpmPackageReqReference; use import_map::parse_from_json; use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; mod binary; +mod file_system; +mod virtual_fs; pub use 
binary::extract_standalone; pub use binary::is_standalone_binary; pub use binary::DenoCompileBinaryWriter; +use self::binary::load_npm_vfs; use self::binary::Metadata; +use self::file_system::DenoCompileFileSystem; + +struct SharedModuleLoaderState { + eszip: eszip::EszipV2, + mapped_specifier_resolver: MappedSpecifierResolver, + npm_module_loader: Arc, +} #[derive(Clone)] struct EmbeddedModuleLoader { - eszip: Arc, - maybe_import_map_resolver: Option>, + shared: Arc, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, } impl ModuleLoader for EmbeddedModuleLoader { @@ -61,10 +82,10 @@ impl ModuleLoader for EmbeddedModuleLoader { &self, specifier: &str, referrer: &str, - _kind: ResolutionKind, + kind: ResolutionKind, ) -> Result { // Try to follow redirects when resolving. - let referrer = match self.eszip.get_module(referrer) { + let referrer = match self.shared.eszip.get_module(referrer) { Some(eszip::Module { ref specifier, .. }) => { ModuleSpecifier::parse(specifier)? } @@ -74,27 +95,93 @@ impl ModuleLoader for EmbeddedModuleLoader { } }; - self - .maybe_import_map_resolver + let permissions = if matches!(kind, ResolutionKind::DynamicImport) { + &self.dynamic_permissions + } else { + &self.root_permissions + }; + + if let Some(result) = self + .shared + .npm_module_loader + .resolve_if_in_npm_package(specifier, &referrer, permissions) + { + return result; + } + + let maybe_mapped = self + .shared + .mapped_specifier_resolver + .resolve(specifier, &referrer)? 
+ .into_specifier(); + + // npm specifier + let specifier_text = maybe_mapped .as_ref() - .map(|r| r.resolve(specifier, &referrer)) - .unwrap_or_else(|| { - deno_core::resolve_import(specifier, referrer.as_str()) - .map_err(|err| err.into()) - }) + .map(|r| r.as_str()) + .unwrap_or(specifier); + if let Ok(reference) = NpmPackageReqReference::from_str(specifier_text) { + return self + .shared + .npm_module_loader + .resolve_req_reference(&reference, permissions); + } + + // Built-in Node modules + if let Some(module_name) = specifier_text.strip_prefix("node:") { + return deno_node::resolve_builtin_node_module(module_name); + } + + match maybe_mapped { + Some(resolved) => Ok(resolved), + None => deno_core::resolve_import(specifier, referrer.as_str()) + .map_err(|err| err.into()), + } } fn load( &self, module_specifier: &ModuleSpecifier, - _maybe_referrer: Option<&ModuleSpecifier>, - _is_dynamic: bool, + maybe_referrer: Option<&ModuleSpecifier>, + is_dynamic: bool, ) -> Pin> { let is_data_uri = get_source_from_data_url(module_specifier).ok(); + let permissions = if is_dynamic { + &self.dynamic_permissions + } else { + &self.root_permissions + }; + + if let Some(result) = + self.shared.npm_module_loader.load_sync_if_in_npm_package( + module_specifier, + maybe_referrer, + permissions, + ) + { + return match result { + Ok(code_source) => Box::pin(deno_core::futures::future::ready(Ok( + deno_core::ModuleSource::new_with_redirect( + match code_source.media_type { + MediaType::Json => ModuleType::Json, + _ => ModuleType::JavaScript, + }, + code_source.code, + module_specifier, + &code_source.found_url, + ), + ))), + Err(err) => Box::pin(deno_core::futures::future::ready(Err(err))), + }; + } + let module = self + .shared .eszip .get_module(module_specifier.as_str()) - .ok_or_else(|| type_error("Module not found")); + .ok_or_else(|| { + type_error(format!("Module not found: {}", module_specifier)) + }); // TODO(mmastrac): This clone can probably be removed in the future if 
ModuleSpecifier is no longer a full-fledged URL let module_specifier = module_specifier.clone(); @@ -128,24 +215,32 @@ impl ModuleLoader for EmbeddedModuleLoader { } struct StandaloneModuleLoaderFactory { - loader: EmbeddedModuleLoader, + shared: Arc, } impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { fn create_for_main( &self, - _root_permissions: PermissionsContainer, - _dynamic_permissions: PermissionsContainer, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, ) -> Rc { - Rc::new(self.loader.clone()) + Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + root_permissions, + dynamic_permissions, + }) } fn create_for_worker( &self, - _root_permissions: PermissionsContainer, - _dynamic_permissions: PermissionsContainer, + root_permissions: PermissionsContainer, + dynamic_permissions: PermissionsContainer, ) -> Rc { - Rc::new(self.loader.clone()) + Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + root_permissions, + dynamic_permissions, + }) } fn create_source_map_getter( @@ -183,6 +278,9 @@ pub async fn run( metadata: Metadata, ) -> Result<(), AnyError> { let main_module = &metadata.entrypoint; + let current_exe_path = std::env::current_exe().unwrap(); + let current_exe_name = + current_exe_path.file_name().unwrap().to_string_lossy(); let dir = DenoDir::new(None)?; let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { ca_stores: metadata.ca_stores, @@ -194,9 +292,14 @@ pub async fn run( Some(root_cert_store_provider.clone()), metadata.unsafely_ignore_certificate_errors.clone(), )); - let npm_registry_url = CliNpmRegistryApi::default_url().to_owned(); + // use a dummy npm registry url + let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); + let root_path = std::env::temp_dir() + .join(format!("deno-compile-{}", current_exe_name)) + .join("node_modules"); + let npm_cache = Arc::new(NpmCache::new( - dir.npm_folder_path(), + root_path.clone(), 
CacheSetting::Use, http_client.clone(), progress_bar.clone(), @@ -207,44 +310,92 @@ pub async fn run( http_client.clone(), progress_bar.clone(), )); - let fs = Arc::new(deno_fs::RealFs); - let npm_resolution = - Arc::new(NpmResolution::from_serialized(npm_api.clone(), None, None)); + let (fs, node_modules_path, snapshot) = if let Some(snapshot) = + metadata.npm_snapshot + { + let vfs_root_dir_path = if metadata.node_modules_dir { + root_path + } else { + npm_cache.registry_folder(&npm_registry_url) + }; + let vfs = + load_npm_vfs(vfs_root_dir_path).context("Failed to load npm vfs.")?; + let node_modules_path = if metadata.node_modules_dir { + Some(vfs.root().to_path_buf()) + } else { + None + }; + ( + Arc::new(DenoCompileFileSystem::new(vfs)) as Arc, + node_modules_path, + Some(snapshot.into_valid()?), + ) + } else { + ( + Arc::new(deno_fs::RealFs) as Arc, + None, + None, + ) + }; + let npm_resolution = Arc::new(NpmResolution::from_serialized( + npm_api.clone(), + snapshot, + None, + )); + let has_node_modules_dir = node_modules_path.is_some(); let npm_fs_resolver = create_npm_fs_resolver( fs.clone(), npm_cache, &progress_bar, npm_registry_url, npm_resolution.clone(), - None, + node_modules_path, ); let npm_resolver = Arc::new(CliNpmResolver::new( + fs.clone(), npm_resolution.clone(), npm_fs_resolver, None, )); let node_resolver = Arc::new(NodeResolver::new(fs.clone(), npm_resolver.clone())); + let cjs_resolutions = Arc::new(CjsResolutionStore::default()); + let cache_db = Caches::new(dir.clone()); + let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); + let cjs_esm_code_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache); + let node_code_translator = Arc::new(NodeCodeTranslator::new( + cjs_esm_code_analyzer, + fs.clone(), + node_resolver.clone(), + npm_resolver.clone(), + )); + let package_json_deps_provider = Arc::new(PackageJsonDepsProvider::new( + metadata + .package_json_deps + .map(|serialized| serialized.into_deps()), + )); + 
let maybe_import_map = metadata.maybe_import_map.map(|(base, source)| { + Arc::new(parse_from_json(&base, &source).unwrap().import_map) + }); let module_loader_factory = StandaloneModuleLoaderFactory { - loader: EmbeddedModuleLoader { - eszip: Arc::new(eszip), - maybe_import_map_resolver: metadata.maybe_import_map.map( - |(base, source)| { - Arc::new(CliGraphResolver::new( - None, - Some(Arc::new( - parse_from_json(&base, &source).unwrap().import_map, - )), - false, - npm_api.clone(), - npm_resolution.clone(), - Default::default(), - )) - }, + shared: Arc::new(SharedModuleLoaderState { + eszip, + mapped_specifier_resolver: MappedSpecifierResolver::new( + maybe_import_map.clone(), + package_json_deps_provider.clone(), ), - }, + npm_module_loader: Arc::new(NpmModuleLoader::new( + cjs_resolutions, + node_code_translator, + fs.clone(), + node_resolver.clone(), + )), + }), }; + let permissions = PermissionsContainer::new(Permissions::from_options( + &metadata.permissions, + )?); let worker_factory = CliMainWorkerFactory::new( StorageKeyResolver::empty(), npm_resolver.clone(), @@ -260,14 +411,17 @@ pub async fn run( debug: false, coverage_dir: None, enable_testing_features: false, - has_node_modules_dir: false, + has_node_modules_dir, inspect_brk: false, inspect_wait: false, is_inspecting: false, - is_npm_main: false, + is_npm_main: main_module.scheme() == "npm", location: metadata.location, - // todo(dsherret): support a binary command being compiled - maybe_binary_npm_command_name: None, + maybe_binary_npm_command_name: NpmPackageReqReference::from_specifier( + main_module, + ) + .ok() + .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref)), origin_data_folder_path: None, seed: metadata.seed, unsafely_ignore_certificate_errors: metadata @@ -278,9 +432,6 @@ pub async fn run( v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![])); - let permissions = PermissionsContainer::new(Permissions::from_options( - &metadata.permissions, - )?); let mut worker = 
worker_factory .create_main_worker(main_module.clone(), permissions) .await?; diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs new file mode 100644 index 0000000000..9c0601bcc1 --- /dev/null +++ b/cli/standalone/virtual_fs.rs @@ -0,0 +1,983 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +use std::borrow::Cow; +use std::collections::HashMap; +use std::collections::HashSet; +use std::fs::File; +use std::io::Read; +use std::io::Seek; +use std::io::SeekFrom; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; +use std::sync::Arc; + +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +use deno_core::parking_lot::Mutex; +use deno_core::BufMutView; +use deno_core::BufView; +use deno_runtime::deno_fs::FsDirEntry; +use deno_runtime::deno_io; +use deno_runtime::deno_io::fs::FsError; +use deno_runtime::deno_io::fs::FsResult; +use deno_runtime::deno_io::fs::FsStat; +use serde::Deserialize; +use serde::Serialize; + +use crate::util; + +pub struct VfsBuilder { + root_path: PathBuf, + root_dir: VirtualDirectory, + files: Vec>, + current_offset: u64, + file_offsets: HashMap, +} + +impl VfsBuilder { + pub fn new(root_path: PathBuf) -> Self { + Self { + root_dir: VirtualDirectory { + name: root_path + .file_stem() + .unwrap() + .to_string_lossy() + .into_owned(), + entries: Vec::new(), + }, + root_path, + files: Vec::new(), + current_offset: 0, + file_offsets: Default::default(), + } + } + + pub fn set_root_dir_name(&mut self, name: String) { + self.root_dir.name = name; + } + + pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> { + self.add_dir(path); + let read_dir = std::fs::read_dir(path) + .with_context(|| format!("Reading {}", path.display()))?; + + for entry in read_dir { + let entry = entry?; + let file_type = entry.file_type()?; + let path = entry.path(); + + if file_type.is_dir() { + self.add_dir_recursive(&path)?; + } else if file_type.is_file() { + let file_bytes = 
std::fs::read(&path) + .with_context(|| format!("Reading {}", path.display()))?; + self.add_file(&path, file_bytes); + } else if file_type.is_symlink() { + let target = std::fs::read_link(&path) + .with_context(|| format!("Reading symlink {}", path.display()))?; + self.add_symlink(&path, &target); + } + } + + Ok(()) + } + + pub fn add_dir(&mut self, path: &Path) -> &mut VirtualDirectory { + let path = path.strip_prefix(&self.root_path).unwrap(); + let mut current_dir = &mut self.root_dir; + + for component in path.components() { + let name = component.as_os_str().to_string_lossy(); + let index = match current_dir + .entries + .binary_search_by(|e| e.name().cmp(&name)) + { + Ok(index) => index, + Err(insert_index) => { + current_dir.entries.insert( + insert_index, + VfsEntry::Dir(VirtualDirectory { + name: name.to_string(), + entries: Vec::new(), + }), + ); + insert_index + } + }; + match &mut current_dir.entries[index] { + VfsEntry::Dir(dir) => { + current_dir = dir; + } + _ => unreachable!(), + }; + } + + current_dir + } + + pub fn add_file(&mut self, path: &Path, data: Vec) { + let checksum = util::checksum::gen(&[&data]); + let offset = if let Some(offset) = self.file_offsets.get(&checksum) { + // duplicate file, reuse an old offset + *offset + } else { + self.file_offsets.insert(checksum, self.current_offset); + self.current_offset + }; + + let dir = self.add_dir(path.parent().unwrap()); + let name = path.file_name().unwrap().to_string_lossy(); + let data_len = data.len(); + match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + Ok(_) => unreachable!(), + Err(insert_index) => { + dir.entries.insert( + insert_index, + VfsEntry::File(VirtualFile { + name: name.to_string(), + offset, + len: data.len() as u64, + }), + ); + } + } + + // new file, update the list of files + if self.current_offset == offset { + self.files.push(data); + self.current_offset += data_len as u64; + } + } + + pub fn add_symlink(&mut self, path: &Path, target: &Path) { + let dest = 
target.strip_prefix(&self.root_path).unwrap().to_path_buf(); + let dir = self.add_dir(path.parent().unwrap()); + let name = path.file_name().unwrap().to_string_lossy(); + match dir.entries.binary_search_by(|e| e.name().cmp(&name)) { + Ok(_) => unreachable!(), + Err(insert_index) => { + dir.entries.insert( + insert_index, + VfsEntry::Symlink(VirtualSymlink { + name: name.to_string(), + dest_parts: dest + .components() + .map(|c| c.as_os_str().to_string_lossy().to_string()) + .collect::>(), + }), + ); + } + } + } + + pub fn into_dir_and_files(self) -> (VirtualDirectory, Vec>) { + (self.root_dir, self.files) + } +} + +#[derive(Debug)] +enum VfsEntryRef<'a> { + Dir(&'a VirtualDirectory), + File(&'a VirtualFile), + Symlink(&'a VirtualSymlink), +} + +impl<'a> VfsEntryRef<'a> { + pub fn as_fs_stat(&self) -> FsStat { + match self { + VfsEntryRef::Dir(_) => FsStat { + is_directory: true, + is_file: false, + is_symlink: false, + atime: None, + birthtime: None, + mtime: None, + blksize: 0, + size: 0, + dev: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + blocks: 0, + }, + VfsEntryRef::File(file) => FsStat { + is_directory: false, + is_file: true, + is_symlink: false, + atime: None, + birthtime: None, + mtime: None, + blksize: 0, + size: file.len, + dev: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + blocks: 0, + }, + VfsEntryRef::Symlink(_) => FsStat { + is_directory: false, + is_file: false, + is_symlink: true, + atime: None, + birthtime: None, + mtime: None, + blksize: 0, + size: 0, + dev: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + blocks: 0, + }, + } + } +} + +// todo(dsherret): we should store this more efficiently in the binary +#[derive(Debug, Serialize, Deserialize)] +pub enum VfsEntry { + Dir(VirtualDirectory), + File(VirtualFile), + Symlink(VirtualSymlink), +} + +impl VfsEntry { + pub fn name(&self) -> &str { + match self { + VfsEntry::Dir(dir) => &dir.name, + VfsEntry::File(file) => &file.name, + 
VfsEntry::Symlink(symlink) => &symlink.name, + } + } + + fn as_ref(&self) -> VfsEntryRef { + match self { + VfsEntry::Dir(dir) => VfsEntryRef::Dir(dir), + VfsEntry::File(file) => VfsEntryRef::File(file), + VfsEntry::Symlink(symlink) => VfsEntryRef::Symlink(symlink), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct VirtualDirectory { + pub name: String, + // should be sorted by name + pub entries: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VirtualFile { + pub name: String, + pub offset: u64, + pub len: u64, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct VirtualSymlink { + pub name: String, + pub dest_parts: Vec, +} + +impl VirtualSymlink { + pub fn resolve_dest_from_root(&self, root: &Path) -> PathBuf { + let mut dest = root.to_path_buf(); + for part in &self.dest_parts { + dest.push(part); + } + dest + } +} + +#[derive(Debug)] +pub struct VfsRoot { + pub dir: VirtualDirectory, + pub root_path: PathBuf, + pub start_file_offset: u64, +} + +impl VfsRoot { + fn find_entry<'a>( + &'a self, + path: &Path, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + self.find_entry_inner(path, &mut HashSet::new()) + } + + fn find_entry_inner<'a>( + &'a self, + path: &Path, + seen: &mut HashSet, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + let mut path = Cow::Borrowed(path); + loop { + let (resolved_path, entry) = + self.find_entry_no_follow_inner(&path, seen)?; + match entry { + VfsEntryRef::Symlink(symlink) => { + if !seen.insert(path.to_path_buf()) { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "circular symlinks", + )); + } + path = Cow::Owned(symlink.resolve_dest_from_root(&self.root_path)); + } + _ => { + return Ok((resolved_path, entry)); + } + } + } + } + + fn find_entry_no_follow( + &self, + path: &Path, + ) -> std::io::Result<(PathBuf, VfsEntryRef)> { + self.find_entry_no_follow_inner(path, &mut HashSet::new()) + } + + fn find_entry_no_follow_inner<'a>( + &'a self, + path: &Path, + 
seen: &mut HashSet, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + let relative_path = match path.strip_prefix(&self.root_path) { + Ok(p) => p, + Err(_) => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + }; + let mut final_path = self.root_path.clone(); + let mut current_entry = VfsEntryRef::Dir(&self.dir); + for component in relative_path.components() { + let component = component.as_os_str().to_string_lossy(); + let current_dir = match current_entry { + VfsEntryRef::Dir(dir) => { + final_path.push(component.as_ref()); + dir + } + VfsEntryRef::Symlink(symlink) => { + let dest = symlink.resolve_dest_from_root(&self.root_path); + let (resolved_path, entry) = self.find_entry_inner(&dest, seen)?; + final_path = resolved_path; // overwrite with the new resolved path + match entry { + VfsEntryRef::Dir(dir) => { + final_path.push(component.as_ref()); + dir + } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + } + } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + }; + match current_dir + .entries + .binary_search_by(|e| e.name().cmp(&component)) + { + Ok(index) => { + current_entry = current_dir.entries[index].as_ref(); + } + Err(_) => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + } + } + + Ok((final_path, current_entry)) + } +} + +#[derive(Clone)] +struct FileBackedVfsFile { + file: VirtualFile, + pos: Arc>, + vfs: Arc, +} + +impl FileBackedVfsFile { + fn seek(&self, pos: SeekFrom) -> FsResult { + match pos { + SeekFrom::Start(pos) => { + *self.pos.lock() = pos; + Ok(pos) + } + SeekFrom::End(offset) => { + if offset < 0 && -offset as u64 > self.file.len { + Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.").into()) + } else { + let mut current_pos = 
self.pos.lock(); + *current_pos = if offset >= 0 { + self.file.len - (offset as u64) + } else { + self.file.len + (-offset as u64) + }; + Ok(*current_pos) + } + } + SeekFrom::Current(offset) => { + let mut current_pos = self.pos.lock(); + if offset >= 0 { + *current_pos += offset as u64; + } else if -offset as u64 > *current_pos { + return Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.").into()); + } else { + *current_pos -= -offset as u64; + } + Ok(*current_pos) + } + } + } + + fn read_to_buf(&self, buf: &mut [u8]) -> FsResult { + let pos = { + let mut pos = self.pos.lock(); + let read_pos = *pos; + // advance the position due to the read + *pos = std::cmp::min(self.file.len, *pos + buf.len() as u64); + read_pos + }; + self + .vfs + .read_file(&self.file, pos, buf) + .map_err(|err| err.into()) + } + + fn read_to_end(&self) -> FsResult> { + let pos = { + let mut pos = self.pos.lock(); + let read_pos = *pos; + // todo(dsherret): should this always set it to the end of the file? + if *pos < self.file.len { + // advance the position due to the read + *pos = self.file.len; + } + read_pos + }; + if pos > self.file.len { + return Ok(Vec::new()); + } + let size = (self.file.len - pos) as usize; + let mut buf = vec![0; size]; + self.vfs.read_file(&self.file, pos, &mut buf)?; + Ok(buf) + } +} + +#[async_trait::async_trait(?Send)] +impl deno_io::fs::File for FileBackedVfsFile { + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { + self.read_to_buf(buf) + } + async fn read_byob( + self: Rc, + mut buf: BufMutView, + ) -> FsResult<(usize, BufMutView)> { + let inner = (*self).clone(); + tokio::task::spawn(async move { + let nread = inner.read_to_buf(&mut buf)?; + Ok((nread, buf)) + }) + .await? 
+ } + + fn write_sync(self: Rc, _buf: &[u8]) -> FsResult { + Err(FsError::NotSupported) + } + async fn write( + self: Rc, + _buf: BufView, + ) -> FsResult { + Err(FsError::NotSupported) + } + + fn write_all_sync(self: Rc, _buf: &[u8]) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn write_all(self: Rc, _buf: BufView) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn read_all_sync(self: Rc) -> FsResult> { + self.read_to_end() + } + async fn read_all_async(self: Rc) -> FsResult> { + let inner = (*self).clone(); + tokio::task::spawn_blocking(move || inner.read_to_end()).await? + } + + fn chmod_sync(self: Rc, _pathmode: u32) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn seek_sync(self: Rc, pos: SeekFrom) -> FsResult { + self.seek(pos) + } + async fn seek_async(self: Rc, pos: SeekFrom) -> FsResult { + self.seek(pos) + } + + fn datasync_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn datasync_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn sync_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn sync_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn stat_sync(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + async fn stat_async(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + + fn lock_sync(self: Rc, _exclusive: bool) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn lock_async(self: Rc, _exclusive: bool) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn unlock_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn unlock_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn truncate_sync(self: Rc, _len: u64) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn truncate_async(self: Rc, _len: u64) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn 
utime_sync( + self: Rc, + _atime_secs: i64, + _atime_nanos: u32, + _mtime_secs: i64, + _mtime_nanos: u32, + ) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn utime_async( + self: Rc, + _atime_secs: i64, + _atime_nanos: u32, + _mtime_secs: i64, + _mtime_nanos: u32, + ) -> FsResult<()> { + Err(FsError::NotSupported) + } + + // lower level functionality + fn as_stdio(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + #[cfg(unix)] + fn backing_fd(self: Rc) -> Option { + None + } + #[cfg(windows)] + fn backing_fd(self: Rc) -> Option { + None + } + fn try_clone_inner(self: Rc) -> FsResult> { + Ok(self) + } +} + +#[derive(Debug)] +pub struct FileBackedVfs { + file: Mutex, + fs_root: VfsRoot, +} + +impl FileBackedVfs { + pub fn new(file: File, fs_root: VfsRoot) -> Self { + Self { + file: Mutex::new(file), + fs_root, + } + } + + pub fn root(&self) -> &Path { + &self.fs_root.root_path + } + + pub fn is_path_within(&self, path: &Path) -> bool { + path.starts_with(&self.fs_root.root_path) + } + + pub fn open_file( + self: &Arc, + path: &Path, + ) -> std::io::Result> { + let file = self.file_entry(path)?; + Ok(Rc::new(FileBackedVfsFile { + file: file.clone(), + vfs: self.clone(), + pos: Default::default(), + })) + } + + pub fn read_dir(&self, path: &Path) -> std::io::Result> { + let dir = self.dir_entry(path)?; + Ok( + dir + .entries + .iter() + .map(|entry| FsDirEntry { + name: entry.name().to_string(), + is_file: matches!(entry, VfsEntry::File(_)), + is_directory: matches!(entry, VfsEntry::Dir(_)), + is_symlink: matches!(entry, VfsEntry::Symlink(_)), + }) + .collect(), + ) + } + + pub fn read_link(&self, path: &Path) -> std::io::Result { + let (_, entry) = self.fs_root.find_entry_no_follow(path)?; + match entry { + VfsEntryRef::Symlink(symlink) => { + Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path)) + } + VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "not a symlink", + )), + } + } + + pub 
fn lstat(&self, path: &Path) -> std::io::Result { + let (_, entry) = self.fs_root.find_entry_no_follow(path)?; + Ok(entry.as_fs_stat()) + } + + pub fn stat(&self, path: &Path) -> std::io::Result { + let (_, entry) = self.fs_root.find_entry(path)?; + Ok(entry.as_fs_stat()) + } + + pub fn canonicalize(&self, path: &Path) -> std::io::Result { + let (path, _) = self.fs_root.find_entry(path)?; + Ok(path) + } + + pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result> { + let mut buf = vec![0; file.len as usize]; + self.read_file(file, 0, &mut buf)?; + Ok(buf) + } + + pub fn read_file( + &self, + file: &VirtualFile, + pos: u64, + buf: &mut [u8], + ) -> std::io::Result { + let mut fs_file = self.file.lock(); + fs_file.seek(SeekFrom::Start( + self.fs_root.start_file_offset + file.offset + pos, + ))?; + fs_file.read(buf) + } + + pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { + let (_, entry) = self.fs_root.find_entry(path)?; + match entry { + VfsEntryRef::Dir(dir) => Ok(dir), + VfsEntryRef::Symlink(_) => unreachable!(), + VfsEntryRef::File(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "path is a file", + )), + } + } + + pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> { + let (_, entry) = self.fs_root.find_entry(path)?; + match entry { + VfsEntryRef::Dir(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "path is a directory", + )), + VfsEntryRef::Symlink(_) => unreachable!(), + VfsEntryRef::File(file) => Ok(file), + } + } +} + +#[cfg(test)] +mod test { + use std::io::Write; + use test_util::TempDir; + + use super::*; + + fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { + let file = vfs.file_entry(path).unwrap(); + String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap() + } + + #[test] + fn builds_and_uses_virtual_fs() { + let temp_dir = TempDir::new(); + let src_path = temp_dir.path().join("src"); + let mut builder = VfsBuilder::new(src_path.clone()); + 
builder.add_file(&src_path.join("a.txt"), "data".into()); + builder.add_file(&src_path.join("b.txt"), "data".into()); + assert_eq!(builder.files.len(), 1); // because duplicate data + builder.add_file(&src_path.join("c.txt"), "c".into()); + builder.add_file(&src_path.join("sub_dir").join("d.txt"), "d".into()); + builder.add_file(&src_path.join("e.txt"), "e".into()); + builder.add_symlink( + &src_path.join("sub_dir").join("e.txt"), + &src_path.join("e.txt"), + ); + + // get the virtual fs + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + + assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data"); + assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data"); + + // attempt reading a symlink + assert_eq!( + read_file(&virtual_fs, &dest_path.join("sub_dir").join("e.txt")), + "e", + ); + + // canonicalize symlink + assert_eq!( + virtual_fs + .canonicalize(&dest_path.join("sub_dir").join("e.txt")) + .unwrap(), + dest_path.join("e.txt"), + ); + + // metadata + assert!( + virtual_fs + .lstat(&dest_path.join("sub_dir").join("e.txt")) + .unwrap() + .is_symlink + ); + assert!( + virtual_fs + .stat(&dest_path.join("sub_dir").join("e.txt")) + .unwrap() + .is_file + ); + assert!( + virtual_fs + .stat(&dest_path.join("sub_dir")) + .unwrap() + .is_directory, + ); + assert!(virtual_fs.stat(&dest_path.join("e.txt")).unwrap().is_file,); + } + + #[test] + fn test_include_dir_recursive() { + let temp_dir = TempDir::new(); + temp_dir.create_dir_all("src/nested/sub_dir"); + temp_dir.write("src/a.txt", "data"); + temp_dir.write("src/b.txt", "data"); + util::fs::symlink_dir( + &temp_dir.path().join("src/nested/sub_dir"), + &temp_dir.path().join("src/sub_dir_link"), + ) + .unwrap(); + temp_dir.write("src/nested/sub_dir/c.txt", "c"); + + // build and create the virtual fs + let src_path = temp_dir.path().join("src"); + let mut builder = VfsBuilder::new(src_path.clone()); + builder.add_dir_recursive(&src_path).unwrap(); + let (dest_path, virtual_fs) 
= into_virtual_fs(builder, &temp_dir); + + assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data",); + assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data",); + + assert_eq!( + read_file( + &virtual_fs, + &dest_path.join("nested").join("sub_dir").join("c.txt") + ), + "c", + ); + assert_eq!( + read_file(&virtual_fs, &dest_path.join("sub_dir_link").join("c.txt")), + "c", + ); + assert!( + virtual_fs + .lstat(&dest_path.join("sub_dir_link")) + .unwrap() + .is_symlink + ); + + assert_eq!( + virtual_fs + .canonicalize(&dest_path.join("sub_dir_link").join("c.txt")) + .unwrap(), + dest_path.join("nested").join("sub_dir").join("c.txt"), + ); + } + + fn into_virtual_fs( + builder: VfsBuilder, + temp_dir: &TempDir, + ) -> (PathBuf, FileBackedVfs) { + let virtual_fs_file = temp_dir.path().join("virtual_fs"); + let (root_dir, files) = builder.into_dir_and_files(); + { + let mut file = std::fs::File::create(&virtual_fs_file).unwrap(); + for file_data in &files { + file.write_all(file_data).unwrap(); + } + } + let file = std::fs::File::open(&virtual_fs_file).unwrap(); + let dest_path = temp_dir.path().join("dest"); + ( + dest_path.clone(), + FileBackedVfs::new( + file, + VfsRoot { + dir: root_dir, + root_path: dest_path, + start_file_offset: 0, + }, + ), + ) + } + + #[test] + fn circular_symlink() { + let temp_dir = TempDir::new(); + let src_path = temp_dir.path().join("src"); + let mut builder = VfsBuilder::new(src_path.clone()); + builder.add_symlink(&src_path.join("a.txt"), &src_path.join("b.txt")); + builder.add_symlink(&src_path.join("b.txt"), &src_path.join("c.txt")); + builder.add_symlink(&src_path.join("c.txt"), &src_path.join("a.txt")); + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + assert_eq!( + virtual_fs + .file_entry(&dest_path.join("a.txt")) + .err() + .unwrap() + .to_string(), + "circular symlinks", + ); + assert_eq!( + virtual_fs.read_link(&dest_path.join("a.txt")).unwrap(), + dest_path.join("b.txt") + ); + 
assert_eq!( + virtual_fs.read_link(&dest_path.join("b.txt")).unwrap(), + dest_path.join("c.txt") + ); + } + + #[tokio::test] + async fn test_open_file() { + let temp_dir = TempDir::new(); + let temp_path = temp_dir.path(); + let mut builder = VfsBuilder::new(temp_path.to_path_buf()); + builder.add_file( + &temp_path.join("a.txt"), + "0123456789".to_string().into_bytes(), + ); + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + let virtual_fs = Arc::new(virtual_fs); + let file = virtual_fs.open_file(&dest_path.join("a.txt")).unwrap(); + file.clone().seek_sync(SeekFrom::Current(2)).unwrap(); + let mut buf = vec![0; 2]; + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"45"); + file.clone().seek_sync(SeekFrom::Current(-4)).unwrap(); + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.clone().seek_sync(SeekFrom::Start(2)).unwrap(); + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.clone().seek_sync(SeekFrom::End(2)).unwrap(); + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"89"); + file.clone().seek_sync(SeekFrom::Current(-8)).unwrap(); + file.clone().read_sync(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + assert_eq!( + file + .clone() + .seek_sync(SeekFrom::Current(-5)) + .err() + .unwrap() + .into_io_error() + .to_string(), + "An attempt was made to move the file pointer before the beginning of the file." 
+ ); + // go beyond the file length, then back + file.clone().seek_sync(SeekFrom::Current(40)).unwrap(); + file.clone().seek_sync(SeekFrom::Current(-38)).unwrap(); + let read_buf = file.clone().read(2).await.unwrap(); + assert_eq!(read_buf.to_vec(), b"67"); + file.clone().seek_sync(SeekFrom::Current(-2)).unwrap(); + + // read to the end of the file + let all_buf = file.clone().read_all_sync().unwrap(); + assert_eq!(all_buf.to_vec(), b"6789"); + file.clone().seek_sync(SeekFrom::Current(-9)).unwrap(); + + // try try_clone_inner and read_all_async + let all_buf = file + .try_clone_inner() + .unwrap() + .read_all_async() + .await + .unwrap(); + assert_eq!(all_buf.to_vec(), b"123456789"); + } +} diff --git a/cli/tests/integration/compile_tests.rs b/cli/tests/integration/compile_tests.rs index 7835d7f0d8..ac088ca903 100644 --- a/cli/tests/integration/compile_tests.rs +++ b/cli/tests/integration/compile_tests.rs @@ -5,6 +5,8 @@ use std::process::Command; use test_util as util; use test_util::TempDir; use util::assert_contains; +use util::env_vars_for_npm_tests; +use util::TestContextBuilder; #[test] fn compile() { @@ -675,30 +677,40 @@ fn workers_basic() { #[test] fn workers_not_in_module_map() { - let _guard = util::http_server(); - let dir = TempDir::new(); + let context = TestContextBuilder::for_npm() + .use_http_server() + .use_temp_cwd() + .build(); + let temp_dir = context.temp_dir(); let exe = if cfg!(windows) { - dir.path().join("not_in_module_map.exe") + temp_dir.path().join("not_in_module_map.exe") } else { - dir.path().join("not_in_module_map") + temp_dir.path().join("not_in_module_map") }; - let output = util::deno_cmd() - .current_dir(util::root_path()) - .arg("compile") - .arg("--output") - .arg(&exe) - .arg(util::testdata_path().join("./compile/workers/not_in_module_map.ts")) - .output() - .unwrap(); - assert!(output.status.success()); + let main_path = + util::testdata_path().join("./compile/workers/not_in_module_map.ts"); + let output = context + 
.new_command() + .args_vec([ + "compile", + "--output", + &exe.to_string_lossy(), + &main_path.to_string_lossy(), + ]) + .run(); + output.assert_exit_code(0); + output.skip_output_check(); - let output = Command::new(&exe).env("NO_COLOR", "").output().unwrap(); - assert!(!output.status.success()); - let stderr = String::from_utf8(output.stderr).unwrap(); - assert!(stderr.starts_with(concat!( - "error: Uncaught (in worker \"\") Module not found\n", - "error: Uncaught (in promise) Error: Unhandled error in child worker.\n" - ))); + let output = context + .new_command() + .command_name(exe.to_string_lossy()) + .env("NO_COLOR", "") + .run(); + output.assert_exit_code(1); + output.assert_matches_text(concat!( + "error: Uncaught (in worker \"\") Module not found: [WILDCARD]", + "error: Uncaught (in promise) Error: Unhandled error in child worker.\n[WILDCARD]" + )); } #[test] @@ -790,3 +802,256 @@ fn dynamic_import_unanalyzable() { .unwrap(); assert_eq!(String::from_utf8(output.stdout).unwrap(), expected); } + +itest!(npm_specifiers_errors_no_unstable { + args: "compile -A --quiet npm/cached_only/main.ts", + output_str: Some( + concat!( + "error: Using npm specifiers with deno compile requires the --unstable flag.", + "\n\n", + "Caused by:\n", + " npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). 
Found: npm:chalk@5.0.1\n" + ) + ), + exit_code: 1, + envs: env_vars_for_npm_tests(), + http_server: true, +}); + +#[test] +fn compile_npm_specifiers() { + let context = TestContextBuilder::for_npm() + .use_sync_npm_download() + .use_temp_cwd() + .build(); + + let temp_dir = context.temp_dir(); + temp_dir.write( + "main.ts", + concat!( + "import path from 'node:path';\n", + "import { getValue, setValue } from 'npm:@denotest/esm-basic';\n", + "import getValueDefault from 'npm:@denotest/esm-import-cjs-default';\n", + "setValue(2);\n", + "console.log(path.join('testing', 'this'));", + "console.log(getValue());", + "console.log(getValueDefault());", + ), + ); + + let binary_path = if cfg!(windows) { + temp_dir.path().join("binary.exe") + } else { + temp_dir.path().join("binary") + }; + + // try with and without --node-modules-dir + let compile_commands = &[ + "compile --unstable --output binary main.ts", + "compile --unstable --node-modules-dir --output binary main.ts", + ]; + + for compile_command in compile_commands { + let output = context.new_command().args(compile_command).run(); + output.assert_exit_code(0); + output.skip_output_check(); + + let output = context + .new_command() + .command_name(binary_path.to_string_lossy()) + .run(); + output.assert_matches_text( + r#"Node esm importing node cjs +=========================== +{ + default: [Function (anonymous)], + named: [Function (anonymous)], + MyClass: [class MyClass] +} +{ default: [Function (anonymous)], named: [Function (anonymous)] } +[Module: null prototype] { + MyClass: [class MyClass], + __esModule: true, + default: { + default: [Function (anonymous)], + named: [Function (anonymous)], + MyClass: [class MyClass] + }, + named: [Function (anonymous)] +} +[Module: null prototype] { + __esModule: true, + default: { default: [Function (anonymous)], named: [Function (anonymous)] }, + named: [Function (anonymous)] +} +=========================== +static method +testing[WILDCARD]this +2 +5 +"#, + ); + } + + // 
try with a package.json + temp_dir.remove_dir_all("node_modules"); + temp_dir.write( + "main.ts", + concat!( + "import { getValue, setValue } from '@denotest/esm-basic';\n", + "setValue(2);\n", + "console.log(getValue());", + ), + ); + temp_dir.write( + "package.json", + r#"{ "dependencies": { "@denotest/esm-basic": "1" } }"#, + ); + + let output = context + .new_command() + .args("compile --unstable --output binary main.ts") + .run(); + output.assert_exit_code(0); + output.skip_output_check(); + + let output = context + .new_command() + .command_name(binary_path.to_string_lossy()) + .run(); + output.assert_matches_text("2\n"); +} + +#[test] +fn compile_npm_file_system() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "compile/npm_fs/main.ts", + output_file: "compile/npm_fs/main.out", + node_modules_dir: true, + input_name: Some("binary"), + expected_name: "binary", + run_args: vec![], + }); +} + +#[test] +fn compile_npm_bin_esm() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "npm:@denotest/bin/cli-esm", + run_args: vec!["this", "is", "a", "test"], + output_file: "npm/deno_run_esm.out", + node_modules_dir: false, + input_name: None, + expected_name: "cli-esm", + }); +} + +#[test] +fn compile_npm_bin_cjs() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "npm:@denotest/bin/cli-cjs", + run_args: vec!["this", "is", "a", "test"], + output_file: "npm/deno_run_cjs.out", + node_modules_dir: false, + input_name: None, + expected_name: "cli-cjs", + }); +} + +#[test] +fn compile_npm_cowsay() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "npm:cowsay@1.5.0", + run_args: vec!["Hello"], + output_file: "npm/deno_run_cowsay.out", + node_modules_dir: false, + input_name: None, + expected_name: "cowsay", + }); +} + +#[test] +fn compile_npm_cowsay_explicit() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "npm:cowsay@1.5.0/cowsay", + run_args: 
vec!["Hello"], + output_file: "npm/deno_run_cowsay.out", + node_modules_dir: false, + input_name: None, + expected_name: "cowsay", + }); +} + +#[test] +fn compile_npm_cowthink() { + run_npm_bin_compile_test(RunNpmBinCompileOptions { + input_specifier: "npm:cowsay@1.5.0/cowthink", + run_args: vec!["Hello"], + output_file: "npm/deno_run_cowthink.out", + node_modules_dir: false, + input_name: None, + expected_name: "cowthink", + }); +} + +struct RunNpmBinCompileOptions<'a> { + input_specifier: &'a str, + output_file: &'a str, + node_modules_dir: bool, + input_name: Option<&'a str>, + expected_name: &'a str, + run_args: Vec<&'a str>, +} + +fn run_npm_bin_compile_test(opts: RunNpmBinCompileOptions) { + let context = TestContextBuilder::for_npm() + .use_sync_npm_download() + .use_temp_cwd() + .build(); + + let temp_dir = context.temp_dir(); + let testdata_path = context.testdata_path(); + let main_specifier = if opts.input_specifier.starts_with("npm:") { + opts.input_specifier.to_string() + } else { + testdata_path + .join(opts.input_specifier) + .to_string_lossy() + .to_string() + }; + + let mut args = vec![ + "compile".to_string(), + "-A".to_string(), + "--unstable".to_string(), + ]; + + if opts.node_modules_dir { + args.push("--node-modules-dir".to_string()); + } + + if let Some(bin_name) = opts.input_name { + args.push("--output".to_string()); + args.push(bin_name.to_string()); + } + + args.push(main_specifier); + + // compile + let output = context.new_command().args_vec(args).run(); + output.assert_exit_code(0); + output.skip_output_check(); + + // run + let binary_path = if cfg!(windows) { + temp_dir.path().join(format!("{}.exe", opts.expected_name)) + } else { + temp_dir.path().join(opts.expected_name) + }; + let output = context + .new_command() + .command_name(binary_path.to_string_lossy()) + .args_vec(opts.run_args) + .run(); + output.assert_matches_file(opts.output_file); +} diff --git a/cli/tests/integration/npm_tests.rs b/cli/tests/integration/npm_tests.rs 
index d4f2d3e455..c043220275 100644 --- a/cli/tests/integration/npm_tests.rs +++ b/cli/tests/integration/npm_tests.rs @@ -855,17 +855,9 @@ fn ensure_registry_files_local() { } } -itest!(compile_errors { - args: "compile -A --quiet npm/cached_only/main.ts", - output_str: Some("error: npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"), - exit_code: 1, - envs: env_vars_for_npm_tests(), - http_server: true, - }); - itest!(bundle_errors { args: "bundle --quiet npm/esm/main.js", - output_str: Some("error: npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"), + output_str: Some("error: npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"), exit_code: 1, envs: env_vars_for_npm_tests(), http_server: true, diff --git a/cli/tests/testdata/compile/npm_fs/main.out b/cli/tests/testdata/compile/npm_fs/main.out new file mode 100644 index 0000000000..2e9ba477f8 --- /dev/null +++ b/cli/tests/testdata/compile/npm_fs/main.out @@ -0,0 +1 @@ +success diff --git a/cli/tests/testdata/compile/npm_fs/main.ts b/cli/tests/testdata/compile/npm_fs/main.ts new file mode 100644 index 0000000000..f9951d7a46 --- /dev/null +++ b/cli/tests/testdata/compile/npm_fs/main.ts @@ -0,0 +1,259 @@ +import { url } from "npm:@denotest/esm-basic"; +import { fileURLToPath } from "node:url"; +import path from "node:path"; +import assert from "node:assert/strict"; + +// will be at node_modules\.deno\@denotest+esm-basic@1.0.0\node_modules\@denotest\esm-basic +const dirPath = path.dirname(fileURLToPath(url)); +const nodeModulesPath = path.join(dirPath, "../../../../../"); +const packageJsonText = `{ + "name": "@denotest/esm-basic", + "version": "1.0.0", + "type": "module", + "main": "main.mjs", + "types": "main.d.mts" +} +`; +const vfsPackageJsonPath = 
path.join(dirPath, "package.json"); + +// reading a file in vfs +{ + const text = Deno.readTextFileSync(vfsPackageJsonPath); + assert.equal(text, packageJsonText); +} + +// reading a file async in vfs +{ + const text = await Deno.readTextFile(vfsPackageJsonPath); + assert.equal(text, packageJsonText); +} + +// copy file from vfs to real fs +{ + Deno.copyFileSync(vfsPackageJsonPath, "package.json"); + assert.equal(Deno.readTextFileSync("package.json"), packageJsonText); +} + +// copy to vfs +assert.throws( + () => Deno.copyFileSync("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +Deno.removeSync("package.json"); + +// copy file async from vfs to real fs +{ + await Deno.copyFile(vfsPackageJsonPath, "package.json"); + assert.equal(Deno.readTextFileSync("package.json"), packageJsonText); +} + +// copy to vfs async +await assert.rejects( + () => Deno.copyFile("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +Deno.removeSync("package.json"); + +// open +{ + const file = Deno.openSync(vfsPackageJsonPath); + const bytes = new Uint8Array(10); + file.seekSync(2, Deno.SeekMode.Start); + assert.equal(file.readSync(bytes), 10); + const text = new TextDecoder().decode(bytes); + assert.equal(text, packageJsonText.slice(2, 12)); +} +{ + const file = await Deno.open(vfsPackageJsonPath); + const bytes = new Uint8Array(10); + await file.seek(2, Deno.SeekMode.Start); + assert.equal(await file.read(bytes), 10); + const text = new TextDecoder().decode(bytes); + assert.equal(text, packageJsonText.slice(2, 12)); +} + +// chdir +assert.throws(() => Deno.chdir(dirPath), Deno.errors.NotSupported); + +// mkdir +assert.throws( + () => Deno.mkdirSync(path.join(dirPath, "subDir")), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.mkdir(path.join(dirPath, "subDir")), + Deno.errors.NotSupported, +); + +// chmod +assert.throws( + () => Deno.chmodSync(vfsPackageJsonPath, 0o777), + Deno.errors.NotSupported, +); +await assert.rejects( + () => 
Deno.chmod(vfsPackageJsonPath, 0o777), + Deno.errors.NotSupported, +); + +// chown +assert.throws( + () => Deno.chownSync(vfsPackageJsonPath, 1000, 1000), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.chown(vfsPackageJsonPath, 1000, 1000), + Deno.errors.NotSupported, +); + +// remove +assert.throws( + () => Deno.removeSync(vfsPackageJsonPath), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.remove(vfsPackageJsonPath), + Deno.errors.NotSupported, +); + +// stat +{ + const result = Deno.statSync(vfsPackageJsonPath); + assert(result.isFile); +} +{ + const result = await Deno.stat(vfsPackageJsonPath); + assert(result.isFile); +} + +// lstat +{ + const result = Deno.lstatSync( + path.join(nodeModulesPath, "@denotest", "esm-basic"), + ); + assert(result.isSymlink); +} +{ + const result = await Deno.lstat( + path.join(nodeModulesPath, "@denotest", "esm-basic"), + ); + assert(result.isSymlink); +} + +// realpath +{ + const result = Deno.realPathSync( + path.join(nodeModulesPath, "@denotest", "esm-basic", "package.json"), + ); + assert.equal(result, vfsPackageJsonPath); +} +{ + const result = await Deno.realPath( + path.join(nodeModulesPath, "@denotest", "esm-basic", "package.json"), + ); + assert.equal(result, vfsPackageJsonPath); +} + +// read dir +const readDirNames = ["main.d.mts", "main.mjs", "package.json"]; +{ + const names = Array.from(Deno.readDirSync(dirPath)) + .map((e) => e.name); + assert.deepEqual(readDirNames, names); +} +{ + const names = []; + for await (const entry of Deno.readDir(dirPath)) { + names.push(entry.name); + } + assert.deepEqual(readDirNames, names); +} + +// rename +assert.throws( + () => Deno.renameSync("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +assert.throws( + () => Deno.renameSync(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.rename("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +await 
assert.rejects( + () => Deno.rename(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); + +// link +assert.throws( + () => Deno.linkSync("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +assert.throws( + () => Deno.linkSync(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.link("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.link(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); + +// symlink +assert.throws( + () => Deno.symlinkSync("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +assert.throws( + () => Deno.symlinkSync(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.symlink("package.json", vfsPackageJsonPath), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.symlink(vfsPackageJsonPath, "package.json"), + Deno.errors.NotSupported, +); + +// read link +{ + const result = Deno.readLinkSync( + path.join(nodeModulesPath, "@denotest", "esm-basic"), + ); + assert.equal(result, dirPath); +} +{ + const result = await Deno.readLink( + path.join(nodeModulesPath, "@denotest", "esm-basic"), + ); + assert.equal(result, dirPath); +} + +// truncate +assert.throws( + () => Deno.truncateSync(vfsPackageJsonPath, 0), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.truncate(vfsPackageJsonPath, 0), + Deno.errors.NotSupported, +); + +// utime +assert.throws( + () => Deno.utimeSync(vfsPackageJsonPath, 0, 0), + Deno.errors.NotSupported, +); +await assert.rejects( + () => Deno.utime(vfsPackageJsonPath, 0, 0), + Deno.errors.NotSupported, +); + +console.log("success"); diff --git a/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.d.mts b/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.d.mts index fa7814911e..29da1e6d7b 100644 --- 
a/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.d.mts +++ b/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.d.mts @@ -1,2 +1,3 @@ export declare function setValue(val: number): void; export declare function getValue(): number; +export declare const url: string; diff --git a/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.mjs b/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.mjs index 23df4221cb..0a44f75859 100644 --- a/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.mjs +++ b/cli/tests/testdata/npm/registry/@denotest/esm-basic/1.0.0/main.mjs @@ -7,3 +7,5 @@ export function setValue(newValue) { export function getValue() { return value; } + +export const url = import.meta.url; diff --git a/cli/tests/testdata/package_json/basic/main.info.out b/cli/tests/testdata/package_json/basic/main.info.out index bf36f4f19e..3572c75e11 100644 --- a/cli/tests/testdata/package_json/basic/main.info.out +++ b/cli/tests/testdata/package_json/basic/main.info.out @@ -5,4 +5,4 @@ size: [WILDCARD] file:///[WILDCARD]/main.ts (63B) └─┬ file:///[WILDCARD]/lib.ts (166B) - └── npm:@denotest/esm-basic@1.0.0 (345B) + └── npm:@denotest/esm-basic@1.0.0 (416B) diff --git a/cli/tools/standalone.rs b/cli/tools/compile.rs similarity index 95% rename from cli/tools/standalone.rs rename to cli/tools/compile.rs index d34e5da833..f10a2d0257 100644 --- a/cli/tools/standalone.rs +++ b/cli/tools/compile.rs @@ -5,7 +5,6 @@ use crate::args::Flags; use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; use crate::standalone::is_standalone_binary; -use crate::standalone::DenoCompileBinaryWriter; use crate::util::path::path_has_trailing_slash; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -25,14 +24,9 @@ pub async fn compile( ) -> Result<(), AnyError> { let factory = CliFactory::from_flags(flags).await?; let cli_options = factory.cli_options(); - let file_fetcher = factory.file_fetcher()?; - 
let http_client = factory.http_client(); - let deno_dir = factory.deno_dir()?; let module_graph_builder = factory.module_graph_builder().await?; let parsed_source_cache = factory.parsed_source_cache()?; - - let binary_writer = - DenoCompileBinaryWriter::new(file_fetcher, http_client, deno_dir); + let binary_writer = factory.create_compile_binary_writer().await?; let module_specifier = cli_options.resolve_main_module()?; let module_roots = { let mut vec = Vec::with_capacity(compile_flags.include.len() + 1); @@ -56,8 +50,11 @@ pub async fn compile( ) .unwrap(); - // at the moment, we don't support npm specifiers in deno_compile, so show an error - error_for_any_npm_specifier(&graph)?; + if !cli_options.unstable() { + error_for_any_npm_specifier(&graph).context( + "Using npm specifiers with deno compile requires the --unstable flag.", + )?; + } let parser = parsed_source_cache.as_capturing_parser(); let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; diff --git a/cli/tools/mod.rs b/cli/tools/mod.rs index cf29435a7c..c4a8306ab9 100644 --- a/cli/tools/mod.rs +++ b/cli/tools/mod.rs @@ -3,6 +3,7 @@ pub mod bench; pub mod bundle; pub mod check; +pub mod compile; pub mod coverage; pub mod doc; pub mod fmt; @@ -12,7 +13,6 @@ pub mod installer; pub mod lint; pub mod repl; pub mod run; -pub mod standalone; pub mod task; pub mod test; pub mod upgrade; diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 6380d3822a..bf972e2db8 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -64,12 +64,13 @@ pub async fn execute_script( .await; Ok(exit_code) } else if let Some(script) = package_json_scripts.get(task_name) { + let package_json_deps_provider = factory.package_json_deps_provider(); let package_json_deps_installer = factory.package_json_deps_installer().await?; let npm_resolver = factory.npm_resolver().await?; let node_resolver = factory.node_resolver().await?; - if let Some(package_deps) = package_json_deps_installer.package_deps() { + if let 
Some(package_deps) = package_json_deps_provider.deps() { for (key, value) in package_deps { if let Err(err) = value { log::info!( diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs index 774ff0d583..e8a474ed34 100644 --- a/cli/tools/vendor/test.rs +++ b/cli/tools/vendor/test.rs @@ -22,7 +22,6 @@ use import_map::ImportMap; use crate::cache::ParsedSourceCache; use crate::npm::CliNpmRegistryApi; use crate::npm::NpmResolution; -use crate::npm::PackageJsonDepsInstaller; use crate::resolver::CliGraphResolver; use super::build::VendorEnvironment; @@ -270,18 +269,14 @@ async fn build_test_graph( None, None, )); - let deps_installer = Arc::new(PackageJsonDepsInstaller::new( - npm_registry_api.clone(), - npm_resolution.clone(), - None, - )); CliGraphResolver::new( None, Some(Arc::new(original_import_map)), false, npm_registry_api, npm_resolution, - deps_installer, + Default::default(), + Default::default(), ) }); let mut graph = ModuleGraph::default(); diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 9d3c6fccbd..94ec24fe6b 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -92,12 +92,19 @@ pub fn canonicalize_path(path: &Path) -> Result { /// subsequently be created along this path by some other code. 
pub fn canonicalize_path_maybe_not_exists( path: &Path, +) -> Result { + canonicalize_path_maybe_not_exists_with_fs(path, canonicalize_path) +} + +pub fn canonicalize_path_maybe_not_exists_with_fs( + path: &Path, + canonicalize: impl Fn(&Path) -> Result, ) -> Result { let path = path.to_path_buf().clean(); let mut path = path.as_path(); let mut names_stack = Vec::new(); loop { - match canonicalize_path(path) { + match canonicalize(path) { Ok(mut canonicalized_path) => { for name in names_stack.into_iter().rev() { canonicalized_path = canonicalized_path.join(name); diff --git a/ext/fs/interface.rs b/ext/fs/interface.rs index 2d9b68f55d..7624535c92 100644 --- a/ext/fs/interface.rs +++ b/ext/fs/interface.rs @@ -100,7 +100,7 @@ pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync { async fn mkdir_async( &self, path: PathBuf, - recusive: bool, + recursive: bool, mode: u32, ) -> FsResult<()>; diff --git a/ext/io/fs.rs b/ext/io/fs.rs index a333e1dd5a..e335324f5e 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -15,6 +15,7 @@ use deno_core::OpState; use deno_core::ResourceId; use tokio::task::JoinError; +#[derive(Debug)] pub enum FsError { Io(io::Error), FileBusy, @@ -29,6 +30,14 @@ impl FsError { Self::NotSupported => io::ErrorKind::Other, } } + + pub fn into_io_error(self) -> io::Error { + match self { + FsError::Io(err) => err, + FsError::FileBusy => io::Error::new(self.kind(), "file busy"), + FsError::NotSupported => io::Error::new(self.kind(), "not supported"), + } + } } impl From for FsError { diff --git a/runtime/build.rs b/runtime/build.rs index 412257f122..bd141d2970 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -358,6 +358,7 @@ fn main() { if env::var_os("DOCS_RS").is_some() { let snapshot_slice = &[]; #[allow(clippy::needless_borrow)] + #[allow(clippy::disallowed_methods)] std::fs::write(&runtime_snapshot_path, snapshot_slice).unwrap(); } diff --git a/runtime/clippy.toml b/runtime/clippy.toml new file mode 100644 index 0000000000..53676a90e6 
--- /dev/null +++ b/runtime/clippy.toml @@ -0,0 +1,45 @@ +disallowed-methods = [ + { path = "std::env::current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeFs 
trait" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeFs trait" }, + { path = "std::env::set_current_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::env::temp_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::copy", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::DirBuilder::new", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::OpenOptions::new", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_link", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::read", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using FileSystem 
trait" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::rename", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::fs::write", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using FileSystem trait" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using FileSystem trait" }, +] diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs index 157a200f4c..2bc371b680 100644 --- a/runtime/examples/hello_runtime.rs +++ b/runtime/examples/hello_runtime.rs @@ -2,6 +2,7 @@ use deno_core::error::AnyError; use deno_core::FsModuleLoader; +use deno_core::ModuleSpecifier; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::worker::MainWorker; use deno_runtime::worker::WorkerOptions; @@ -14,10 +15,7 @@ deno_core::extension!(hello_runtime, esm = ["hello_runtime_bootstrap.js"]); async fn main() -> Result<(), AnyError> { let js_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("examples/hello_runtime.js"); - let main_module = deno_core::resolve_path( - &js_path.to_string_lossy(), - &std::env::current_dir()?, - )?; + let main_module = ModuleSpecifier::from_file_path(js_path).unwrap(); let mut worker = MainWorker::bootstrap_from_options( main_module.clone(), PermissionsContainer::allow_all(), diff --git a/runtime/fs_util.rs b/runtime/fs_util.rs index eb4a2f8997..204b0e4e85 100644 --- a/runtime/fs_util.rs +++ b/runtime/fs_util.rs 
@@ -3,23 +3,17 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; pub use deno_core::normalize_path; -use std::env::current_dir; -use std::io::Error; use std::path::Path; use std::path::PathBuf; -/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. -pub fn canonicalize_path(path: &Path) -> Result { - Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) -} - #[inline] pub fn resolve_from_cwd(path: &Path) -> Result { if path.is_absolute() { Ok(normalize_path(path)) } else { - let cwd = - current_dir().context("Failed to get current working directory")?; + #[allow(clippy::disallowed_methods)] + let cwd = std::env::current_dir() + .context("Failed to get current working directory")?; Ok(normalize_path(cwd.join(path))) } } @@ -28,21 +22,26 @@ pub fn resolve_from_cwd(path: &Path) -> Result { mod tests { use super::*; + fn current_dir() -> PathBuf { + #[allow(clippy::disallowed_methods)] + std::env::current_dir().unwrap() + } + #[test] fn resolve_from_cwd_child() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a")); } #[test] fn resolve_from_cwd_dot() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd); } #[test] fn resolve_from_cwd_parent() { - let cwd = current_dir().unwrap(); + let cwd = current_dir(); assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd); } @@ -66,7 +65,7 @@ mod tests { #[test] fn resolve_from_cwd_absolute() { let expected = Path::new("a"); - let cwd = current_dir().unwrap(); + let cwd = current_dir(); let absolute_expected = cwd.join(expected); assert_eq!(resolve_from_cwd(expected).unwrap(), absolute_expected); } diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index 911cd327c0..b997a89d9d 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -339,6 +339,7 @@ fn rss() -> usize { (out, idx) } + #[allow(clippy::disallowed_methods)] 
let statm_content = if let Ok(c) = std::fs::read_to_string("/proc/self/statm") { c diff --git a/runtime/ops/os/sys_info.rs b/runtime/ops/os/sys_info.rs index 1a9358dc0b..795e6bb0af 100644 --- a/runtime/ops/os/sys_info.rs +++ b/runtime/ops/os/sys_info.rs @@ -48,6 +48,7 @@ pub fn loadavg() -> LoadAvg { pub fn os_release() -> String { #[cfg(target_os = "linux")] { + #[allow(clippy::disallowed_methods)] match std::fs::read_to_string("/proc/sys/kernel/osrelease") { Ok(mut s) => { s.pop(); // pop '\n' diff --git a/test_util/src/builders.rs b/test_util/src/builders.rs index a5f192b73a..33a1a98f01 100644 --- a/test_util/src/builders.rs +++ b/test_util/src/builders.rs @@ -341,6 +341,7 @@ impl TestCommandBuilder { )) } + #[track_caller] pub fn run(&self) -> TestCommandOutput { fn read_pipe_to_string(mut pipe: os_pipe::PipeReader) -> String { let mut output = String::new(); diff --git a/test_util/src/temp_dir.rs b/test_util/src/temp_dir.rs index db3c246dc5..dc638c7eaf 100644 --- a/test_util/src/temp_dir.rs +++ b/test_util/src/temp_dir.rs @@ -58,6 +58,10 @@ impl TempDir { fs::create_dir_all(self.path().join(path)).unwrap(); } + pub fn remove_dir_all(&self, path: impl AsRef) { + fs::remove_dir_all(self.path().join(path)).unwrap(); + } + pub fn read_to_string(&self, path: impl AsRef) -> String { let file_path = self.path().join(path); fs::read_to_string(&file_path) From f1d0f745d362adb2e19a54693ed10d9020e987fa Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Thu, 11 May 2023 12:32:19 +0200 Subject: [PATCH 151/320] fix(node): expose channels in worker_threads (#19086) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR ensures that node's `worker_threads` module exports `MessageChannel`, `MessagePort` and the `BroadcastChannel` API. Fixing these won't make `esbuild` work, but brings us one step closer 🎉 Fixes #19028 . 
--- cli/tests/node_compat/config.jsonc | 2 ++ .../test-worker-threads-broadcast-channel.js | 9 +++++++++ .../test-worker-threads-message-channel.js | 10 ++++++++++ ext/node/polyfills/worker_threads.ts | 17 +++++++---------- 4 files changed, 28 insertions(+), 10 deletions(-) create mode 100644 cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js create mode 100644 cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 87530d4f5c..8fbc3e921d 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -655,6 +655,8 @@ "test-whatwg-url-override-hostname.js", "test-whatwg-url-properties.js", "test-whatwg-url-toascii.js", + "test-worker-threads-broadcast-channel.js", + "test-worker-threads-message-channel.js", "test-zlib-close-after-error.js", "test-zlib-close-after-write.js", "test-zlib-convenience-methods.js", diff --git a/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js b/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js new file mode 100644 index 0000000000..a8fd3ff0e8 --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js @@ -0,0 +1,9 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +"use strict"; + +const assert = require("assert/strict"); +const worker_threads = require("worker_threads"); + +assert.equal(BroadcastChannel, worker_threads.BroadcastChannel); diff --git a/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js b/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js new file mode 100644 index 0000000000..b831ed3fee --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js @@ -0,0 +1,10 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +"use strict"; + +const assert = require("assert/strict"); +const 
worker_threads = require("worker_threads"); + +assert.equal(MessageChannel, worker_threads.MessageChannel); +assert.equal(MessagePort, worker_threads.MessagePort); diff --git a/ext/node/polyfills/worker_threads.ts b/ext/node/polyfills/worker_threads.ts index cc9529fbd7..2c13e4bc8d 100644 --- a/ext/node/polyfills/worker_threads.ts +++ b/ext/node/polyfills/worker_threads.ts @@ -4,6 +4,8 @@ import { resolve, toFileUrl } from "ext:deno_node/path.ts"; import { notImplemented } from "ext:deno_node/_utils.ts"; import { EventEmitter } from "ext:deno_node/events.ts"; +import { BroadcastChannel } from "ext:deno_broadcast_channel/01_broadcast_channel.js"; +import { MessageChannel, MessagePort } from "ext:deno_web/13_message_port.js"; const environmentData = new Map(); let threads = 0; @@ -204,12 +206,6 @@ export function setEnvironmentData(key: unknown, value?: unknown) { } } -// deno-lint-ignore no-explicit-any -const _MessagePort: typeof MessagePort = (globalThis as any).MessagePort; -const _MessageChannel: typeof MessageChannel = - // deno-lint-ignore no-explicit-any - (globalThis as any).MessageChannel; -export const BroadcastChannel = globalThis.BroadcastChannel; export const SHARE_ENV = Symbol.for("nodejs.worker_threads.SHARE_ENV"); export function markAsUntransferable() { notImplemented("markAsUntransferable"); @@ -221,9 +217,10 @@ export function receiveMessageOnPort() { notImplemented("receiveMessageOnPort"); } export { - _MessageChannel as MessageChannel, - _MessagePort as MessagePort, _Worker as Worker, + BroadcastChannel, + MessageChannel, + MessagePort, parentPort, threadId, workerData, @@ -233,8 +230,8 @@ export default { markAsUntransferable, moveMessagePortToContext, receiveMessageOnPort, - MessagePort: _MessagePort, - MessageChannel: _MessageChannel, + MessagePort, + MessageChannel, BroadcastChannel, Worker: _Worker, getEnvironmentData, From 20c42286f88d861192f35d272a645d8ab6f15be8 Mon Sep 17 00:00:00 2001 From: Yarden Shoham Date: Thu, 11 May 2023 15:52:56 
+0300 Subject: [PATCH 152/320] chore(deps): bump tokio version to 1.28.1 (#19069) --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 48351f7a37..88452bb6b9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5200,9 +5200,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.28.0" +version = "1.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +checksum = "0aa32867d44e6f2ce3385e89dceb990188b8bb0fb25b0cf576647a6f98ac5105" dependencies = [ "autocfg", "bytes", diff --git a/Cargo.toml b/Cargo.toml index f265d9c674..194cce09bb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -133,7 +133,7 @@ socket2 = "0.4.7" tar = "=0.4.38" tempfile = "3.4.0" thiserror = "=1.0.38" -tokio = { version = "1.28.0", features = ["full"] } +tokio = { version = "1.28.1", features = ["full"] } tikv-jemallocator = "0.5.0" tikv-jemalloc-sys = "0.5.3" tokio-rustls = "0.23.3" From 2ba9ccc1ab25e5c631afcbb12b53f4545ca7f750 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 11 May 2023 13:53:45 +0100 Subject: [PATCH 153/320] fix(runtime): `ChildProcess::kill()` doesn't require additional perms (#15339) Fixes #15217. 
--- cli/tests/integration/run_tests.rs | 5 +++ cli/tests/testdata/spawn_kill_permissions.ts | 6 +++ cli/tests/unit/command_test.ts | 18 +++++++++ runtime/js/40_process.js | 7 ++-- runtime/ops/process.rs | 40 +++++++++++++++----- 5 files changed, 63 insertions(+), 13 deletions(-) create mode 100644 cli/tests/testdata/spawn_kill_permissions.ts diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index 26aacc6fdc..e6ea85da45 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -3435,6 +3435,11 @@ itest!(test_and_bench_are_noops_in_run { output_str: Some(""), }); +itest!(spawn_kill_permissions { + args: "run --quiet --unstable --allow-run=deno spawn_kill_permissions.ts", + output_str: Some(""), +}); + itest!(followup_dyn_import_resolved { args: "run --unstable --allow-read run/followup_dyn_import_resolves/main.ts", output: "run/followup_dyn_import_resolves/main.ts.out", diff --git a/cli/tests/testdata/spawn_kill_permissions.ts b/cli/tests/testdata/spawn_kill_permissions.ts new file mode 100644 index 0000000000..e0c1b7bfdb --- /dev/null +++ b/cli/tests/testdata/spawn_kill_permissions.ts @@ -0,0 +1,6 @@ +const child = new Deno.Command("deno", { + args: ["eval", "await new Promise(r => setTimeout(r, 2000))"], + stdout: "null", + stderr: "null", +}).spawn(); +child.kill("SIGTERM"); diff --git a/cli/tests/unit/command_test.ts b/cli/tests/unit/command_test.ts index 0763a7ac68..198f94aedb 100644 --- a/cli/tests/unit/command_test.ts +++ b/cli/tests/unit/command_test.ts @@ -867,3 +867,21 @@ Deno.test( } }, ); + +Deno.test( + { permissions: { run: true, read: true } }, + async function commandKillAfterStatus() { + const command = new Deno.Command(Deno.execPath(), { + args: ["help"], + stdout: "null", + stderr: "null", + }); + const child = command.spawn(); + await child.status; + assertThrows( + () => child.kill(), + TypeError, + "Child process has already terminated.", + ); + }, +); diff --git 
a/runtime/js/40_process.js b/runtime/js/40_process.js index 2a5ac86bf2..664a4b303d 100644 --- a/runtime/js/40_process.js +++ b/runtime/js/40_process.js @@ -200,6 +200,7 @@ function collectOutput(readableStream) { class ChildProcess { #rid; #waitPromiseId; + #waitComplete = false; #unrefed = false; #pid; @@ -268,8 +269,8 @@ class ChildProcess { const waitPromise = core.opAsync("op_spawn_wait", this.#rid); this.#waitPromiseId = waitPromise[promiseIdSymbol]; this.#status = PromisePrototypeThen(waitPromise, (res) => { - this.#rid = null; signal?.[abortSignal.remove](onAbort); + this.#waitComplete = true; return res; }); } @@ -317,10 +318,10 @@ class ChildProcess { } kill(signo = "SIGTERM") { - if (this.#rid === null) { + if (this.#waitComplete) { throw new TypeError("Child process has already terminated."); } - ops.op_kill(this.#pid, signo, "Deno.Child.kill()"); + ops.op_spawn_kill(this.#rid, signo); } ref() { diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index d991c961f2..76db23d029 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -2,6 +2,7 @@ use super::check_unstable; use crate::permissions::PermissionsContainer; +use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::serde_json; @@ -106,6 +107,7 @@ deno_core::extension!( op_spawn_child, op_spawn_wait, op_spawn_sync, + op_spawn_kill, deprecated::op_run, deprecated::op_run_status, deprecated::op_kill, @@ -115,7 +117,9 @@ deno_core::extension!( }, ); -struct ChildResource(tokio::process::Child); +/// Second member stores the pid separately from the RefCell. It's needed for +/// `op_spawn_kill`, where the RefCell is borrowed mutably by `op_spawn_wait`. 
+struct ChildResource(RefCell, u32); impl Resource for ChildResource { fn name(&self) -> Cow { @@ -302,7 +306,9 @@ fn spawn_child( .take() .map(|stderr| state.resource_table.add(ChildStderrResource::from(stderr))); - let child_rid = state.resource_table.add(ChildResource(child)); + let child_rid = state + .resource_table + .add(ChildResource(RefCell::new(child), pid)); Ok(Child { rid: child_rid, @@ -328,17 +334,18 @@ async fn op_spawn_wait( state: Rc>, rid: ResourceId, ) -> Result { + #![allow(clippy::await_holding_refcell_ref)] let resource = state .borrow_mut() .resource_table - .take::(rid)?; - Rc::try_unwrap(resource) - .ok() - .unwrap() - .0 - .wait() - .await? - .try_into() + .get::(rid)?; + let result = resource.0.try_borrow_mut()?.wait().await?.try_into(); + state + .borrow_mut() + .resource_table + .close(rid) + .expect("shouldn't have closed until now"); + result } #[op] @@ -366,6 +373,19 @@ fn op_spawn_sync( }) } +#[op] +fn op_spawn_kill( + state: &mut OpState, + rid: ResourceId, + signal: String, +) -> Result<(), AnyError> { + if let Ok(child_resource) = state.resource_table.get::(rid) { + deprecated::kill(child_resource.1 as i32, &signal)?; + return Ok(()); + } + Err(type_error("Child process has already terminated.")) +} + mod deprecated { use super::*; From ffb0318e4a2427abd0ab9c77ab48ef57357b0dc0 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 11 May 2023 14:08:17 +0100 Subject: [PATCH 154/320] fix(console): handle error when inspecting promise-like (#19083) Fixes https://discord.com/channels/684898665143206084/684911491035430919/1105900195406958672. This was caused by: - A `TypeError` from `core.getPromiseDetails()` for promise-likes which also lead to that code path. - Swallowing internal formatting errors by returning `undefined`. 
I've made it so that a special message is formatted in that case instead (note that this case is fixed now): ![image](https://github.com/denoland/deno/assets/29990554/65bb9612-60b2-4e31-bf5e-e20976601593) --- cli/tests/unit/console_test.ts | 7 ++++++ ext/console/01_console.js | 43 +++++++++++----------------------- 2 files changed, 21 insertions(+), 29 deletions(-) diff --git a/cli/tests/unit/console_test.ts b/cli/tests/unit/console_test.ts index 0bd53dc779..c4f2f64a4b 100644 --- a/cli/tests/unit/console_test.ts +++ b/cli/tests/unit/console_test.ts @@ -2235,6 +2235,13 @@ Deno.test(function inspectWithPrototypePollution() { } }); +Deno.test(function inspectPromiseLike() { + assertEquals( + Deno.inspect(Object.create(Promise.prototype)), + "Promise { }", + ); +}); + Deno.test(function inspectorMethods() { console.timeStamp("test"); console.profile("test"); diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 31431f120a..dbbc549cad 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -161,6 +161,7 @@ const styles = { // TODO(BridgeAR): Highlight regular expressions properly. 
regexp: "red", module: "underline", + internalError: "red", }; const defaultFG = 39; @@ -1022,7 +1023,6 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { ArrayPrototypePush(ctx.seen, value); ctx.currentDepth = recurseTimes; let output; - const indentationLvl = ctx.indentationLvl; try { output = formatter(ctx, value, recurseTimes); for (i = 0; i < keys.length; i++) { @@ -1034,13 +1034,12 @@ function formatRaw(ctx, value, recurseTimes, typedArray, proxyDetails) { if (protoProps !== undefined) { ArrayPrototypePushApply(output, protoProps); } - } catch (err) { - const constructorName = StringPrototypeSlice( - getCtxStyle(value, constructor, tag), - 0, - -1, + } catch (error) { + // TODO(wafuwafu13): Implement stack overflow check + return ctx.stylize( + `[Internal Formatting Error] ${error.stack}`, + "internalError", ); - return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); } if (ctx.circular !== undefined) { @@ -1658,8 +1657,14 @@ const PromiseState = { function formatPromise(ctx, value, recurseTimes) { let output; - // TODO(wafuwafu13): Implement - const { 0: state, 1: result } = core.getPromiseDetails(value); + let opResult; + // This op will fail for non-promises, but we get here for some promise-likes. + try { + opResult = core.getPromiseDetails(value); + } catch { + return [ctx.stylize("", "special")]; + } + const { 0: state, 1: result } = opResult; if (state === PromiseState.Pending) { output = [ctx.stylize("", "special")]; } else { @@ -1770,26 +1775,6 @@ function formatProperty( return `${name}:${extra}${str}`; } -function handleMaxCallStackSize( - _ctx, - _err, - _constructorName, - _indentationLvl, -) { - // TODO(wafuwafu13): Implement - // if (isStackOverflowError(err)) { - // ctx.seen.pop(); - // ctx.indentationLvl = indentationLvl; - // return ctx.stylize( - // `[${constructorName}: Inspection interrupted ` + - // 'prematurely. 
Maximum call stack size exceeded.]', - // 'special' - // ); - // } - // /* c8 ignore next */ - // assert.fail(err.stack); -} - const colorRegExp = new SafeRegExp("\u001b\\[\\d\\d?m", "g"); function removeColors(str) { return StringPrototypeReplace(str, colorRegExp, ""); From b8495e0377ccd328a75762e349e4ad2f060b2a25 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 11 May 2023 10:12:58 -0400 Subject: [PATCH 155/320] fix(cli): upgrade to Typescript 5.0.4 (#19090) --- cli/build.rs | 2 +- cli/tests/unit/version_test.ts | 2 +- cli/tsc/00_typescript.js | 45 ++++++++++++++++++++-------------- 3 files changed, 29 insertions(+), 20 deletions(-) diff --git a/cli/build.rs b/cli/build.rs index 560c8ceae2..2b2181bae4 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -467,7 +467,7 @@ fn main() { ); let ts_version = ts::version(); - debug_assert_eq!(ts_version, "5.0.3"); // bump this assertion when it changes + debug_assert_eq!(ts_version, "5.0.4"); // bump this assertion when it changes println!("cargo:rustc-env=TS_VERSION={}", ts_version); println!("cargo:rerun-if-env-changed=TS_VERSION"); diff --git a/cli/tests/unit/version_test.ts b/cli/tests/unit/version_test.ts index f129de6b23..222aeeb851 100644 --- a/cli/tests/unit/version_test.ts +++ b/cli/tests/unit/version_test.ts @@ -6,5 +6,5 @@ Deno.test(function version() { const pattern = /^\d+\.\d+\.\d+/; assert(pattern.test(Deno.version.deno)); assert(pattern.test(Deno.version.v8)); - assertEquals(Deno.version.typescript, "5.0.3"); + assertEquals(Deno.version.typescript, "5.0.4"); }); diff --git a/cli/tsc/00_typescript.js b/cli/tsc/00_typescript.js index 63743a3724..a477f09d9e 100644 --- a/cli/tsc/00_typescript.js +++ b/cli/tsc/00_typescript.js @@ -35,7 +35,7 @@ var ts = (() => { "src/compiler/corePublic.ts"() { "use strict"; versionMajorMinor = "5.0"; - version = "5.0.3"; + version = "5.0.4"; Comparison = /* @__PURE__ */ ((Comparison3) => { Comparison3[Comparison3["LessThan"] = -1] = "LessThan"; 
Comparison3[Comparison3["EqualTo"] = 0] = "EqualTo"; @@ -17997,6 +17997,9 @@ ${lanes.join("\n")} function moduleResolutionSupportsPackageJsonExportsAndImports(moduleResolution) { return moduleResolution >= 3 /* Node16 */ && moduleResolution <= 99 /* NodeNext */ || moduleResolution === 100 /* Bundler */; } + function shouldResolveJsRequire(compilerOptions) { + return !!compilerOptions.noDtsResolution || getEmitModuleResolutionKind(compilerOptions) !== 100 /* Bundler */; + } function getResolvePackageJsonExports(compilerOptions) { const moduleResolution = getEmitModuleResolutionKind(compilerOptions); if (!moduleResolutionSupportsPackageJsonExportsAndImports(moduleResolution)) { @@ -31698,6 +31701,12 @@ ${lanes.join("\n")} if (languageVariant === 1 /* JSX */) { return parseJsxElementOrSelfClosingElementOrFragment( /*inExpressionContext*/ + true, + /*topInvalidNodePosition*/ + void 0, + /*openingTag*/ + void 0, + /*mustBeUnary*/ true ); } @@ -31802,7 +31811,7 @@ ${lanes.join("\n")} true )), pos); } - function parseJsxElementOrSelfClosingElementOrFragment(inExpressionContext, topInvalidNodePosition, openingTag) { + function parseJsxElementOrSelfClosingElementOrFragment(inExpressionContext, topInvalidNodePosition, openingTag, mustBeUnary = false) { const pos = getNodePos(); const opening = parseJsxOpeningOrSelfClosingElementOrOpeningFragment(inExpressionContext); let result; @@ -31840,7 +31849,7 @@ ${lanes.join("\n")} Debug.assert(opening.kind === 282 /* JsxSelfClosingElement */); result = opening; } - if (inExpressionContext && token() === 29 /* LessThanToken */) { + if (!mustBeUnary && inExpressionContext && token() === 29 /* LessThanToken */) { const topBadPos = typeof topInvalidNodePosition === "undefined" ? 
result.pos : topInvalidNodePosition; const invalidElement = tryParse(() => parseJsxElementOrSelfClosingElementOrFragment( /*inExpressionContext*/ @@ -38075,7 +38084,8 @@ ${lanes.join("\n")} affectsBuildInfo: true, category: Diagnostics.Modules, description: Diagnostics.Allow_imports_to_include_TypeScript_file_extensions_Requires_moduleResolution_bundler_and_either_noEmit_or_emitDeclarationOnly_to_be_set, - defaultValueDescription: false + defaultValueDescription: false, + transpileOptionValue: void 0 }, { name: "resolvePackageJsonExports", @@ -43773,7 +43783,7 @@ ${lanes.join("\n")} } if (!isBindingPattern(node.name)) { const possibleVariableDecl = node.kind === 257 /* VariableDeclaration */ ? node : node.parent.parent; - if (isInJSFile(node) && getEmitModuleResolutionKind(options) !== 100 /* Bundler */ && isVariableDeclarationInitializedToBareOrAccessedRequire(possibleVariableDecl) && !getJSDocTypeTag(node) && !(getCombinedModifierFlags(node) & 1 /* Export */)) { + if (isInJSFile(node) && shouldResolveJsRequire(options) && isVariableDeclarationInitializedToBareOrAccessedRequire(possibleVariableDecl) && !getJSDocTypeTag(node) && !(getCombinedModifierFlags(node) & 1 /* Export */)) { declareSymbolAndAddToSymbolTable(node, 2097152 /* Alias */, 2097152 /* AliasExcludes */); } else if (isBlockOrCatchScoped(node)) { bindBlockScopedDeclaration(node, 2 /* BlockScopedVariable */, 111551 /* BlockScopedVariableExcludes */); @@ -47284,7 +47294,7 @@ ${lanes.join("\n")} const hasDefaultOnly = isOnlyImportedAsDefault(specifier); const hasSyntheticDefault = canHaveSyntheticDefault(file, moduleSymbol, dontResolveAlias, specifier); if (!exportDefaultSymbol && !hasSyntheticDefault && !hasDefaultOnly) { - if (hasExportAssignmentSymbol(moduleSymbol) && !(getAllowSyntheticDefaultImports(compilerOptions) || getESModuleInterop(compilerOptions))) { + if (hasExportAssignmentSymbol(moduleSymbol) && !allowSyntheticDefaultImports) { const compilerOptionName = moduleKind >= 5 /* ES2015 */ ? 
"allowSyntheticDefaultImports" : "esModuleInterop"; const exportEqualsSymbol = moduleSymbol.exports.get("export=" /* ExportEquals */); const exportAssignment = exportEqualsSymbol.valueDeclaration; @@ -47452,7 +47462,7 @@ ${lanes.join("\n")} if (!isIdentifier(name)) { return void 0; } - const suppressInteropError = name.escapedText === "default" /* Default */ && !!(compilerOptions.allowSyntheticDefaultImports || getESModuleInterop(compilerOptions)); + const suppressInteropError = name.escapedText === "default" /* Default */ && allowSyntheticDefaultImports; const targetSymbol = resolveESModuleSymbol( moduleSymbol, moduleSpecifier, @@ -52116,7 +52126,7 @@ ${lanes.join("\n")} return; } let verbatimTargetName = isShorthandAmbientModuleSymbol(target) && getSomeTargetNameFromDeclarations(symbol.declarations) || unescapeLeadingUnderscores(target.escapedName); - if (verbatimTargetName === "export=" /* ExportEquals */ && (getESModuleInterop(compilerOptions) || compilerOptions.allowSyntheticDefaultImports)) { + if (verbatimTargetName === "export=" /* ExportEquals */ && allowSyntheticDefaultImports) { verbatimTargetName = "default" /* Default */; } const targetName = getInternalSymbolName(target, verbatimTargetName); @@ -73215,7 +73225,7 @@ ${lanes.join("\n")} return anyType; } } - if (isInJSFile(node) && getEmitModuleResolutionKind(compilerOptions) !== 100 /* Bundler */ && isCommonJsRequire(node)) { + if (isInJSFile(node) && shouldResolveJsRequire(compilerOptions) && isCommonJsRequire(node)) { return resolveExternalModuleTypeByLiteral(node.arguments[0]); } const returnType = getReturnTypeOfSignature(signature); @@ -92253,11 +92263,12 @@ ${lanes.join("\n")} return visitEachChild(node, visitor, context); } function visitArrayAssignmentElement(node) { - Debug.assertNode(node, isArrayBindingOrAssignmentElement); - if (isSpreadElement(node)) - return visitAssignmentRestElement(node); - if (!isOmittedExpression(node)) - return visitAssignmentElement(node); + if 
(isArrayBindingOrAssignmentElement(node)) { + if (isSpreadElement(node)) + return visitAssignmentRestElement(node); + if (!isOmittedExpression(node)) + return visitAssignmentElement(node); + } return visitEachChild(node, visitor, context); } function visitAssignmentProperty(node) { @@ -117468,7 +117479,7 @@ ${lanes.join("\n")} false ); } - const shouldProcessRequires = isJavaScriptFile && getEmitModuleResolutionKind(options) !== 100 /* Bundler */; + const shouldProcessRequires = isJavaScriptFile && shouldResolveJsRequire(options); if (file.flags & 2097152 /* PossiblyContainsDynamicImport */ || shouldProcessRequires) { collectDynamicImportOrRequireCalls(file); } @@ -118396,9 +118407,6 @@ ${lanes.join("\n")} if (moduleKind === 2 /* AMD */ || moduleKind === 3 /* UMD */ || moduleKind === 4 /* System */) { createDiagnosticForOptionName(Diagnostics.Option_verbatimModuleSyntax_cannot_be_used_when_module_is_set_to_UMD_AMD_or_System, "verbatimModuleSyntax"); } - if (options.isolatedModules) { - createRedundantOptionDiagnostic("isolatedModules", "verbatimModuleSyntax"); - } if (options.preserveValueImports) { createRedundantOptionDiagnostic("preserveValueImports", "verbatimModuleSyntax"); } @@ -169808,6 +169816,7 @@ ${options.prefix}` : "\n" : options.prefix setValueDeclaration: () => setValueDeclaration, shouldAllowImportingTsExtension: () => shouldAllowImportingTsExtension, shouldPreserveConstEnums: () => shouldPreserveConstEnums, + shouldResolveJsRequire: () => shouldResolveJsRequire, shouldUseUriStyleNodeCoreModules: () => shouldUseUriStyleNodeCoreModules, showModuleSpecifier: () => showModuleSpecifier, signatureHasLiteralTypes: () => signatureHasLiteralTypes, From 78c7ff91e31d900609084a9563d5fd105affd091 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miroslav=20Bajto=C5=A1?= Date: Thu, 11 May 2023 16:14:51 +0200 Subject: [PATCH 156/320] chore: upgrade thiserror and deno_lockfile (#19073) Upgrade `thiserror` to `1.40.0`. 
Remove version pinning so that consumers of deno crates can install newer versions of `thiserror` without waiting for us to upgrade our Cargo.toml. Upgrade `deno_lockfile` to `0.14.0` to bring in `thiserror` upgrade, see https://github.com/denoland/deno_lockfile/pull/1. --- .github/workflows/ci.generate.ts | 4 ++-- .github/workflows/ci.yml | 6 +++--- Cargo.lock | 14 +++++++------- Cargo.toml | 4 ++-- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index b5fa91afb6..139c554d16 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + "24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "23-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "24-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9f2c788c25..6ccfbb2c51 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '23-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '24-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: 
'24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -583,7 +583,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '23-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index 88452bb6b9..be34a16a2f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1102,9 +1102,9 @@ dependencies = [ [[package]] name = "deno_lockfile" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88b89dc19bc7b0c28297c9fde36dc999a04c19b6d01ff061ae30dc9119488c8" +checksum = "54cecfa877ecd31bb7f694826a2b6566ff77515f527bddae296aff455e6999c2" dependencies = [ "ring", "serde", @@ -5118,22 +5118,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", - "syn 1.0.109", + "syn 2.0.13", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 194cce09bb..7bcb59a750 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ deno_runtime = { version = 
"0.111.0", path = "./runtime" } napi_sym = { version = "0.33.0", path = "./cli/napi/sym" } deno_bench_util = { version = "0.97.0", path = "./bench_util" } test_util = { path = "./test_util" } -deno_lockfile = "0.13.0" +deno_lockfile = "0.14.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } deno_npm = "0.3.0" deno_semver = "0.2.1" @@ -132,7 +132,7 @@ smallvec = "1.8" socket2 = "0.4.7" tar = "=0.4.38" tempfile = "3.4.0" -thiserror = "=1.0.38" +thiserror = "1.0.40" tokio = { version = "1.28.1", features = ["full"] } tikv-jemallocator = "0.5.0" tikv-jemalloc-sys = "0.5.3" From 18e9f4642cc9ff598105a0d51263e307949c0423 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 11 May 2023 12:04:27 -0400 Subject: [PATCH 157/320] fix(lsp): hard to soft error when unable to get completion info (#19091) --- cli/lsp/language_server.rs | 11 +++++++---- runtime/ops/process.rs | 1 - 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index d32d12ec8b..1eb3944726 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -2194,10 +2194,13 @@ impl Inner { )); let snapshot = self.snapshot(); let maybe_completion_info: Option = - self.ts_server.request(snapshot, req).await.map_err(|err| { - error!("Unable to get completion info from TypeScript: {}", err); - LspError::internal_error() - })?; + match self.ts_server.request(snapshot, req).await { + Ok(maybe_info) => maybe_info, + Err(err) => { + error!("Unable to get completion info from TypeScript: {:#}", err); + None + } + }; if let Some(completions) = maybe_completion_info { let results = completions.as_completion_response( diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index 76db23d029..a2eace8b6a 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -597,7 +597,6 @@ mod deprecated { #[cfg(not(unix))] pub fn kill(pid: i32, signal: &str) -> Result<(), AnyError> { - use deno_core::error::type_error; 
use std::io::Error; use std::io::ErrorKind::NotFound; use winapi::shared::minwindef::DWORD; From c926bc0debd0df3bf62d5125a490f8675e70c6ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 11 May 2023 21:24:40 +0200 Subject: [PATCH 158/320] fix(npm): make http2 module available, make 'nodeGlobalThisName' writable (#19092) --- ext/node/polyfill.rs | 4 ++++ ext/node/polyfills/02_init.js | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ext/node/polyfill.rs b/ext/node/polyfill.rs index b334d2d341..8cf1cec70d 100644 --- a/ext/node/polyfill.rs +++ b/ext/node/polyfill.rs @@ -101,6 +101,10 @@ pub static SUPPORTED_BUILTIN_NODE_MODULES: &[NodeModulePolyfill] = &[ name: "http", specifier: "ext:deno_node/http.ts", }, + NodeModulePolyfill { + name: "http2", + specifier: "ext:deno_node/http2.ts", + }, NodeModulePolyfill { name: "https", specifier: "ext:deno_node/https.ts", diff --git a/ext/node/polyfills/02_init.js b/ext/node/polyfills/02_init.js index d419c3bcaa..b8070d50f7 100644 --- a/ext/node/polyfills/02_init.js +++ b/ext/node/polyfills/02_init.js @@ -39,7 +39,7 @@ function initialize( // get node's globalThis ObjectDefineProperty(globalThis, nodeGlobalThisName, { enumerable: false, - writable: false, + writable: true, value: nodeGlobalThis, }); // FIXME(bartlomieju): not nice to depend on `Deno` namespace here From 28a72d548801f81a96ff4bba750d8dc51a2b1567 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 11 May 2023 17:17:14 -0400 Subject: [PATCH 159/320] feat(lsp): ability to configure document pre-load limit (#19097) Adds a `deno.preloadLimit` option (ex. `"deno.preloadLimit": 2000`) which specifies how many file entries to traverse on the file system when the lsp loads or its configuration changes. 
Closes #18955 --- cli/lsp/client.rs | 20 --- cli/lsp/code_lens.rs | 2 +- cli/lsp/completions.rs | 2 +- cli/lsp/config.rs | 24 ++- cli/lsp/diagnostics.rs | 2 +- cli/lsp/documents.rs | 234 +++++++++++++++------------- cli/lsp/language_server.rs | 66 ++++---- cli/lsp/repl.rs | 1 + cli/lsp/tsc.rs | 2 +- cli/tests/integration/lsp_tests.rs | 43 +++++ cli/tests/integration/repl_tests.rs | 5 +- test_util/src/lsp.rs | 6 + 12 files changed, 239 insertions(+), 168 deletions(-) diff --git a/cli/lsp/client.rs b/cli/lsp/client.rs index e684dc09fc..d24d4c2a9e 100644 --- a/cli/lsp/client.rs +++ b/cli/lsp/client.rs @@ -26,13 +26,6 @@ pub enum TestingNotification { Progress(testing_lsp_custom::TestRunProgressParams), } -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] -pub enum LspClientKind { - #[default] - CodeEditor, - Repl, -} - #[derive(Clone)] pub struct Client(Arc); @@ -51,10 +44,6 @@ impl Client { Self(Arc::new(ReplClient)) } - pub fn kind(&self) -> LspClientKind { - self.0.kind() - } - /// Gets additional methods that should only be called outside /// the LSP's lock to prevent deadlocking scenarios. 
pub fn when_outside_lsp_lock(&self) -> OutsideLockClient { @@ -160,7 +149,6 @@ impl OutsideLockClient { #[async_trait] trait ClientTrait: Send + Sync { - fn kind(&self) -> LspClientKind; async fn publish_diagnostics( &self, uri: lsp::Url, @@ -189,10 +177,6 @@ struct TowerClient(tower_lsp::Client); #[async_trait] impl ClientTrait for TowerClient { - fn kind(&self) -> LspClientKind { - LspClientKind::CodeEditor - } - async fn publish_diagnostics( &self, uri: lsp::Url, @@ -312,10 +296,6 @@ struct ReplClient; #[async_trait] impl ClientTrait for ReplClient { - fn kind(&self) -> LspClientKind { - LspClientKind::Repl - } - async fn publish_diagnostics( &self, _uri: lsp::Url, diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index 6327b7a9cf..fd7f350061 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -391,7 +391,7 @@ pub async fn collect( code_lenses.extend( collect_tsc( specifier, - &config.get_workspace_settings(), + config.workspace_settings(), line_index, navigation_tree, ) diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index d91fc29c28..070e3168a5 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -519,7 +519,7 @@ mod tests { source_fixtures: &[(&str, &str)], location: &Path, ) -> Documents { - let mut documents = Documents::new(location, Default::default()); + let mut documents = Documents::new(location); for (specifier, source, version, language_id) in fixtures { let specifier = resolve_url(specifier).expect("failed to create specifier"); diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index f4b2d8c09e..0a25e2b992 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -265,6 +265,10 @@ fn default_to_true() -> bool { true } +fn default_document_preload_limit() -> usize { + 1000 +} + fn empty_string_none<'de, D: serde::Deserializer<'de>>( d: D, ) -> Result, D::Error> { @@ -318,6 +322,10 @@ pub struct WorkspaceSettings { #[serde(default = "default_to_true")] pub lint: bool, + /// Limits the number of files 
that can be preloaded by the language server. + #[serde(default = "default_document_preload_limit")] + pub document_preload_limit: usize, + /// A flag that indicates if Dene should validate code against the unstable /// APIs for the workspace. #[serde(default)] @@ -354,6 +362,7 @@ impl Default for WorkspaceSettings { inlay_hints: Default::default(), internal_debug: false, lint: true, + document_preload_limit: default_document_preload_limit(), suggest: Default::default(), testing: Default::default(), tls_certificate: None, @@ -439,8 +448,8 @@ impl Config { } } - pub fn get_workspace_settings(&self) -> WorkspaceSettings { - self.settings.workspace.clone() + pub fn workspace_settings(&self) -> &WorkspaceSettings { + &self.settings.workspace } /// Set the workspace settings directly, which occurs during initialization @@ -714,7 +723,7 @@ mod tests { .set_workspace_settings(json!({})) .expect("could not update"); assert_eq!( - config.get_workspace_settings(), + config.workspace_settings().clone(), WorkspaceSettings { enable: false, enable_paths: Vec::new(), @@ -750,6 +759,7 @@ mod tests { }, internal_debug: false, lint: true, + document_preload_limit: 1_000, suggest: CompletionSettings { complete_function_calls: false, names: true, @@ -778,7 +788,7 @@ mod tests { .set_workspace_settings(json!({ "cache": "" })) .expect("could not update"); assert_eq!( - config.get_workspace_settings(), + config.workspace_settings().clone(), WorkspaceSettings::default() ); } @@ -790,7 +800,7 @@ mod tests { .set_workspace_settings(json!({ "import_map": "" })) .expect("could not update"); assert_eq!( - config.get_workspace_settings(), + config.workspace_settings().clone(), WorkspaceSettings::default() ); } @@ -802,7 +812,7 @@ mod tests { .set_workspace_settings(json!({ "tls_certificate": "" })) .expect("could not update"); assert_eq!( - config.get_workspace_settings(), + config.workspace_settings().clone(), WorkspaceSettings::default() ); } @@ -814,7 +824,7 @@ mod tests { 
.set_workspace_settings(json!({ "config": "" })) .expect("could not update"); assert_eq!( - config.get_workspace_settings(), + config.workspace_settings().clone(), WorkspaceSettings::default() ); } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index b650d8e558..7d13cfdb5f 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1096,7 +1096,7 @@ mod tests { location: &Path, maybe_import_map: Option<(&str, &str)>, ) -> StateSnapshot { - let mut documents = Documents::new(location, Default::default()); + let mut documents = Documents::new(location); for (specifier, source, version, language_id) in fixtures { let specifier = resolve_url(specifier).expect("failed to create specifier"); diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index b55d3ca206..6577d27692 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -1,7 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use super::cache::calculate_fs_version; -use super::client::LspClientKind; use super::text::LineIndex; use super::tsc; use super::tsc::AssetDocument; @@ -793,6 +792,16 @@ fn get_document_path( } } +pub struct UpdateDocumentConfigOptions<'a> { + pub enabled_urls: Vec, + pub document_preload_limit: usize, + pub maybe_import_map: Option>, + pub maybe_config_file: Option<&'a ConfigFile>, + pub maybe_package_json: Option<&'a PackageJson>, + pub npm_registry_api: Arc, + pub npm_resolution: Arc, +} + /// Specify the documents to include on a `documents.documents(...)` call. #[derive(Debug, Clone, Copy)] pub enum DocumentsFilter { @@ -818,8 +827,6 @@ pub struct Documents { open_docs: HashMap, /// Documents stored on the file system. file_system_docs: Arc>, - /// Kind of the client that is using the documents. - lsp_client_kind: LspClientKind, /// Hash of the config used for resolution. When the hash changes we update /// dependencies. 
resolver_config_hash: u64, @@ -839,14 +846,13 @@ pub struct Documents { } impl Documents { - pub fn new(location: &Path, lsp_client_kind: LspClientKind) -> Self { + pub fn new(location: &Path) -> Self { Self { cache: HttpCache::new(location), dirty: true, dependents_map: Default::default(), open_docs: HashMap::default(), file_system_docs: Default::default(), - lsp_client_kind, resolver_config_hash: 0, imports: Default::default(), resolver: Default::default(), @@ -1161,15 +1167,7 @@ impl Documents { Ok(()) } - pub fn update_config( - &mut self, - enabled_urls: Vec, - maybe_import_map: Option>, - maybe_config_file: Option<&ConfigFile>, - maybe_package_json: Option<&PackageJson>, - npm_registry_api: Arc, - npm_resolution: Arc, - ) { + pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) { fn calculate_resolver_config_hash( enabled_urls: &[Url], maybe_import_map: Option<&import_map::ImportMap>, @@ -1208,14 +1206,16 @@ impl Documents { hasher.finish() } - let maybe_package_json_deps = maybe_package_json.map(|package_json| { - package_json::get_local_package_json_version_reqs(package_json) - }); - let maybe_jsx_config = - maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config()); + let maybe_package_json_deps = + options.maybe_package_json.map(|package_json| { + package_json::get_local_package_json_version_reqs(package_json) + }); + let maybe_jsx_config = options + .maybe_config_file + .and_then(|cf| cf.to_maybe_jsx_import_source_config()); let new_resolver_config_hash = calculate_resolver_config_hash( - &enabled_urls, - maybe_import_map.as_deref(), + &options.enabled_urls, + options.maybe_import_map.as_deref(), maybe_jsx_config.as_ref(), maybe_package_json_deps.as_ref(), ); @@ -1223,21 +1223,21 @@ impl Documents { Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps)); let deps_installer = Arc::new(PackageJsonDepsInstaller::new( deps_provider.clone(), - npm_registry_api.clone(), - npm_resolution.clone(), + 
options.npm_registry_api.clone(), + options.npm_resolution.clone(), )); self.resolver = Arc::new(CliGraphResolver::new( maybe_jsx_config, - maybe_import_map, + options.maybe_import_map, false, - npm_registry_api, - npm_resolution, + options.npm_registry_api, + options.npm_resolution, deps_provider, deps_installer, )); self.imports = Arc::new( if let Some(Ok(imports)) = - maybe_config_file.map(|cf| cf.to_maybe_imports()) + options.maybe_config_file.map(|cf| cf.to_maybe_imports()) { imports .into_iter() @@ -1257,14 +1257,21 @@ impl Documents { // only refresh the dependencies if the underlying configuration has changed if self.resolver_config_hash != new_resolver_config_hash { - self.refresh_dependencies(enabled_urls); + self.refresh_dependencies( + options.enabled_urls, + options.document_preload_limit, + ); self.resolver_config_hash = new_resolver_config_hash; } self.dirty = true; } - fn refresh_dependencies(&mut self, enabled_urls: Vec) { + fn refresh_dependencies( + &mut self, + enabled_urls: Vec, + document_preload_limit: usize, + ) { let resolver = self.resolver.as_graph_resolver(); for doc in self.open_docs.values_mut() { if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { @@ -1274,51 +1281,73 @@ impl Documents { // update the file system documents let mut fs_docs = self.file_system_docs.lock(); - match self.lsp_client_kind { - LspClientKind::CodeEditor => { - let mut not_found_docs = - fs_docs.docs.keys().cloned().collect::>(); - let open_docs = &mut self.open_docs; + if document_preload_limit > 0 { + let mut not_found_docs = + fs_docs.docs.keys().cloned().collect::>(); + let open_docs = &mut self.open_docs; - log::debug!("Preloading documents from enabled urls..."); - for specifier in PreloadDocumentFinder::from_enabled_urls(&enabled_urls) + log::debug!("Preloading documents from enabled urls..."); + let mut finder = PreloadDocumentFinder::from_enabled_urls_with_limit( + &enabled_urls, + document_preload_limit, + ); + for specifier in 
finder.by_ref() { + // mark this document as having been found + not_found_docs.remove(&specifier); + + if !open_docs.contains_key(&specifier) + && !fs_docs.docs.contains_key(&specifier) { - // mark this document as having been found - not_found_docs.remove(&specifier); - - if !open_docs.contains_key(&specifier) - && !fs_docs.docs.contains_key(&specifier) - { - fs_docs.refresh_document(&self.cache, resolver, &specifier); - } else { - // update the existing entry to have the new resolver - if let Some(doc) = fs_docs.docs.get_mut(&specifier) { - if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { - *doc = new_doc; - } + fs_docs.refresh_document(&self.cache, resolver, &specifier); + } else { + // update the existing entry to have the new resolver + if let Some(doc) = fs_docs.docs.get_mut(&specifier) { + if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { + *doc = new_doc; } } } + } + if finder.hit_limit() { + lsp_warn!( + concat!( + "Hit the language server document preload limit of {} file system entries. ", + "You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ", + "partially enable a workspace or increase the limit via \"deno.documentPreloadLimit\". ", + "In cases where Deno ends up using too much memory, you may want to lower the limit." + ), + document_preload_limit, + ); + + // since we hit the limit, just update everything to use the new resolver + for uri in not_found_docs { + if let Some(doc) = fs_docs.docs.get_mut(&uri) { + if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { + *doc = new_doc; + } + } + } + } else { // clean up and remove any documents that weren't found for uri in not_found_docs { fs_docs.docs.remove(&uri); } } - LspClientKind::Repl => { - // This log statement is used in the tests to ensure preloading doesn't - // happen, which is not useful in the repl and could be very expensive - // if the repl is launched from a directory with a lot of descendants. 
- log::debug!("Skipping document preload for repl."); + } else { + // This log statement is used in the tests to ensure preloading doesn't + // happen, which is not useful in the repl and could be very expensive + // if the repl is launched from a directory with a lot of descendants. + log::debug!("Skipping document preload."); - // for the repl, just update to use the new resolver - for doc in fs_docs.docs.values_mut() { - if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { - *doc = new_doc; - } + // just update to use the new resolver + for doc in fs_docs.docs.values_mut() { + if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) { + *doc = new_doc; } } } + fs_docs.dirty = true; } @@ -1558,19 +1587,15 @@ enum PendingEntry { /// Iterator that finds documents that can be preloaded into /// the LSP on startup. struct PreloadDocumentFinder { - limit: u16, - entry_count: u16, + limit: usize, + entry_count: usize, pending_entries: VecDeque, } impl PreloadDocumentFinder { - pub fn from_enabled_urls(enabled_urls: &Vec) -> Self { - Self::from_enabled_urls_with_limit(enabled_urls, 1_000) - } - pub fn from_enabled_urls_with_limit( enabled_urls: &Vec, - limit: u16, + limit: usize, ) -> Self { fn is_allowed_root_dir(dir_path: &Path) -> bool { if dir_path.parent().is_none() { @@ -1605,6 +1630,10 @@ impl PreloadDocumentFinder { finder } + pub fn hit_limit(&self) -> bool { + self.entry_count >= self.limit + } + fn get_valid_specifier(path: &Path) -> Option { fn is_allowed_media_type(media_type: MediaType) -> bool { match media_type { @@ -1699,15 +1728,7 @@ impl Iterator for PreloadDocumentFinder { while let Some(entry) = entries.next() { self.entry_count += 1; - if self.entry_count >= self.limit { - lsp_warn!( - concat!( - "Hit the language server document preload limit of {} file system entries. ", - "You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ", - "partially enable a workspace." 
- ), - self.limit, - ); + if self.hit_limit() { self.pending_entries.clear(); // stop searching return None; } @@ -1769,7 +1790,7 @@ mod tests { fn setup(temp_dir: &TempDir) -> (Documents, PathBuf) { let location = temp_dir.path().join("deps"); - let documents = Documents::new(&location, Default::default()); + let documents = Documents::new(&location); (documents, location) } @@ -1899,14 +1920,15 @@ console.log(b, "hello deno"); .append("test".to_string(), "./file2.ts".to_string()) .unwrap(); - documents.update_config( - vec![], - Some(Arc::new(import_map)), - None, - None, - npm_registry_api.clone(), - npm_resolution.clone(), - ); + documents.update_config(UpdateDocumentConfigOptions { + enabled_urls: vec![], + document_preload_limit: 1_000, + maybe_import_map: Some(Arc::new(import_map)), + maybe_config_file: None, + maybe_package_json: None, + npm_registry_api: npm_registry_api.clone(), + npm_resolution: npm_resolution.clone(), + }); // open the document let document = documents.open( @@ -1939,14 +1961,15 @@ console.log(b, "hello deno"); .append("test".to_string(), "./file3.ts".to_string()) .unwrap(); - documents.update_config( - vec![], - Some(Arc::new(import_map)), - None, - None, + documents.update_config(UpdateDocumentConfigOptions { + enabled_urls: vec![], + document_preload_limit: 1_000, + maybe_import_map: Some(Arc::new(import_map)), + maybe_config_file: None, + maybe_package_json: None, npm_registry_api, npm_resolution, - ); + }); // check the document's dependencies let document = documents.get(&file1_specifier).unwrap(); @@ -2001,12 +2024,15 @@ console.log(b, "hello deno"); temp_dir.create_dir_all("root3/"); temp_dir.write("root3/mod.ts", ""); // no, not provided - let mut urls = PreloadDocumentFinder::from_enabled_urls(&vec![ - temp_dir.uri().join("root1/").unwrap(), - temp_dir.uri().join("root2/file1.ts").unwrap(), - temp_dir.uri().join("root2/main.min.ts").unwrap(), - temp_dir.uri().join("root2/folder/").unwrap(), - ]) + let mut urls = 
PreloadDocumentFinder::from_enabled_urls_with_limit( + &vec![ + temp_dir.uri().join("root1/").unwrap(), + temp_dir.uri().join("root2/file1.ts").unwrap(), + temp_dir.uri().join("root2/main.min.ts").unwrap(), + temp_dir.uri().join("root2/folder/").unwrap(), + ], + 1_000, + ) .collect::>(); // Ideally we would test for order here, which should be BFS, but @@ -2048,18 +2074,18 @@ console.log(b, "hello deno"); #[test] pub fn test_pre_load_document_finder_disallowed_dirs() { if cfg!(windows) { - let paths = PreloadDocumentFinder::from_enabled_urls(&vec![Url::parse( - "file:///c:/", + let paths = PreloadDocumentFinder::from_enabled_urls_with_limit( + &vec![Url::parse("file:///c:/").unwrap()], + 1_000, ) - .unwrap()]) .collect::>(); assert_eq!(paths, vec![]); } else { - let paths = - PreloadDocumentFinder::from_enabled_urls(&vec![ - Url::parse("file:///").unwrap() - ]) - .collect::>(); + let paths = PreloadDocumentFinder::from_enabled_urls_with_limit( + &vec![Url::parse("file:///").unwrap()], + 1_000, + ) + .collect::>(); assert_eq!(paths, vec![]); } } diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 1eb3944726..7fe986bfee 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -49,6 +49,7 @@ use super::documents::Document; use super::documents::Documents; use super::documents::DocumentsFilter; use super::documents::LanguageId; +use super::documents::UpdateDocumentConfigOptions; use super::logging::lsp_log; use super::logging::lsp_warn; use super::lsp_custom; @@ -285,7 +286,7 @@ impl LanguageServer { if let Some(testing_server) = &inner.maybe_testing_server { match params.map(serde_json::from_value) { Some(Ok(params)) => testing_server - .run_request(params, inner.config.get_workspace_settings()), + .run_request(params, inner.config.workspace_settings().clone()), Some(Err(err)) => Err(LspError::invalid_params(err.to_string())), None => Err(LspError::invalid_params("Missing parameters")), } @@ -489,7 +490,7 @@ impl Inner { let 
module_registries = ModuleRegistry::new(&module_registries_location, http_client.clone()); let location = dir.deps_folder_path(); - let documents = Documents::new(&location, client.kind()); + let documents = Documents::new(&location); let deps_http_cache = HttpCache::new(&location); let cache_metadata = cache::CacheMetadata::new(deps_http_cache.clone()); let performance = Arc::new(Performance::default()); @@ -602,9 +603,9 @@ impl Inner { } fn get_config_file(&self) -> Result, AnyError> { - let workspace_settings = self.config.get_workspace_settings(); - let maybe_config = workspace_settings.config; - if let Some(config_str) = &maybe_config { + let workspace_settings = self.config.workspace_settings(); + let maybe_config = &workspace_settings.config; + if let Some(config_str) = maybe_config { if !config_str.is_empty() { lsp_log!("Setting Deno configuration from: \"{}\"", config_str); let config_url = if let Ok(url) = Url::from_file_path(config_str) { @@ -744,8 +745,8 @@ impl Inner { pub fn update_cache(&mut self) -> Result<(), AnyError> { let mark = self.performance.mark("update_cache", None::<()>); self.performance.measure(mark); - let maybe_cache = self.config.get_workspace_settings().cache; - let maybe_cache_path = if let Some(cache_str) = &maybe_cache { + let maybe_cache = &self.config.workspace_settings().cache; + let maybe_cache_path = if let Some(cache_str) = maybe_cache { lsp_log!("Setting cache path from: \"{}\"", cache_str); let cache_url = if let Ok(url) = Url::from_file_path(cache_str) { Ok(url) @@ -785,7 +786,7 @@ impl Inner { .clone() .or_else(|| env::var("DENO_DIR").map(String::into).ok()); let dir = DenoDir::new(maybe_custom_root)?; - let workspace_settings = self.config.get_workspace_settings(); + let workspace_settings = self.config.workspace_settings(); let maybe_root_path = self .config .root_uri @@ -793,15 +794,17 @@ impl Inner { .and_then(|uri| specifier_to_file_path(uri).ok()); let root_cert_store = get_root_cert_store( maybe_root_path, - 
workspace_settings.certificate_stores, - workspace_settings.tls_certificate.map(CaData::File), + workspace_settings.certificate_stores.clone(), + workspace_settings.tls_certificate.clone().map(CaData::File), )?; let root_cert_store_provider = Arc::new(LspRootCertStoreProvider(root_cert_store)); let module_registries_location = dir.registries_folder_path(); self.http_client = Arc::new(HttpClient::new( Some(root_cert_store_provider), - workspace_settings.unsafely_ignore_certificate_errors, + workspace_settings + .unsafely_ignore_certificate_errors + .clone(), )); self.module_registries = ModuleRegistry::new( &module_registries_location, @@ -883,8 +886,9 @@ impl Inner { Ok( if let Some(import_map_str) = self .config - .get_workspace_settings() + .workspace_settings() .import_map + .clone() .and_then(|s| if s.is_empty() { None } else { Some(s) }) { lsp_log!( @@ -957,14 +961,14 @@ impl Inner { } pub fn update_debug_flag(&self) { - let internal_debug = self.config.get_workspace_settings().internal_debug; + let internal_debug = self.config.workspace_settings().internal_debug; super::logging::set_lsp_debug_flag(internal_debug) } async fn update_registries(&mut self) -> Result<(), AnyError> { let mark = self.performance.mark("update_registries", None::<()>); self.recreate_http_client_and_dependents(self.maybe_cache_path.clone())?; - let workspace_settings = self.config.get_workspace_settings(); + let workspace_settings = self.config.workspace_settings(); for (registry, enabled) in workspace_settings.suggest.imports.hosts.iter() { if *enabled { lsp_log!("Enabling import suggestions for: {}", registry); @@ -1037,7 +1041,7 @@ impl Inner { "useUnknownInCatchVariables": false, })); let config = &self.config; - let workspace_settings = config.get_workspace_settings(); + let workspace_settings = config.workspace_settings(); if workspace_settings.unstable { let unstable_libs = json!({ "lib": ["deno.ns", "deno.window", "deno.unstable"] @@ -1169,14 +1173,18 @@ impl Inner { } fn 
refresh_documents_config(&mut self) { - self.documents.update_config( - self.config.enabled_urls(), - self.maybe_import_map.clone(), - self.maybe_config_file.as_ref(), - self.maybe_package_json.as_ref(), - self.npm_api.clone(), - self.npm_resolution.clone(), - ); + self.documents.update_config(UpdateDocumentConfigOptions { + enabled_urls: self.config.enabled_urls(), + document_preload_limit: self + .config + .workspace_settings() + .document_preload_limit, + maybe_import_map: self.maybe_import_map.clone(), + maybe_config_file: self.maybe_config_file.as_ref(), + maybe_package_json: self.maybe_package_json.as_ref(), + npm_registry_api: self.npm_api.clone(), + npm_resolution: self.npm_resolution.clone(), + }); } async fn shutdown(&self) -> LspResult<()> { @@ -1871,7 +1879,7 @@ impl Inner { .normalize_url(¶ms.text_document.uri, LspUrlKind::File); if !self.is_diagnosable(&specifier) || !self.config.specifier_enabled(&specifier) - || !(self.config.get_workspace_settings().enabled_code_lens() + || !(self.config.workspace_settings().enabled_code_lens() || self.config.specifier_code_lens_test(&specifier)) { return Ok(None); @@ -2171,7 +2179,7 @@ impl Inner { ), include_automatic_optional_chain_completions: Some(true), include_completions_for_import_statements: Some( - self.config.get_workspace_settings().suggest.auto_imports, + self.config.workspace_settings().suggest.auto_imports, ), include_completions_for_module_exports: Some(true), include_completions_with_object_literal_method_snippets: Some( @@ -2205,7 +2213,7 @@ impl Inner { if let Some(completions) = maybe_completion_info { let results = completions.as_completion_response( line_index, - &self.config.get_workspace_settings().suggest, + &self.config.workspace_settings().suggest, &specifier, position, ); @@ -3315,7 +3323,7 @@ impl Inner { let specifier = self .url_map .normalize_url(¶ms.text_document.uri, LspUrlKind::File); - let workspace_settings = self.config.get_workspace_settings(); + let workspace_settings = 
self.config.workspace_settings(); if !self.is_diagnosable(&specifier) || !self.config.specifier_enabled(&specifier) || !workspace_settings.enabled_inlay_hints() @@ -3334,7 +3342,7 @@ impl Inner { let req = tsc::RequestMethod::ProvideInlayHints(( specifier, range, - (&workspace_settings).into(), + workspace_settings.into(), )); let maybe_inlay_hints: Option> = self .ts_server @@ -3388,7 +3396,7 @@ impl Inner { .collect::>(); documents_specifiers.sort(); let measures = self.performance.to_vec(); - let workspace_settings = self.config.get_workspace_settings(); + let workspace_settings = self.config.workspace_settings(); write!( contents, diff --git a/cli/lsp/repl.rs b/cli/lsp/repl.rs index ada8b94041..ad0171629f 100644 --- a/cli/lsp/repl.rs +++ b/cli/lsp/repl.rs @@ -294,6 +294,7 @@ pub fn get_repl_workspace_settings() -> WorkspaceSettings { inlay_hints: Default::default(), internal_debug: false, lint: false, + document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl tls_certificate: None, unsafely_ignore_certificate_errors: None, unstable: false, diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index c6035192c6..92407bec1a 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -3525,7 +3525,7 @@ mod tests { fixtures: &[(&str, &str, i32, LanguageId)], location: &Path, ) -> StateSnapshot { - let mut documents = Documents::new(location, Default::default()); + let mut documents = Documents::new(location); for (specifier, source, version, language_id) in fixtures { let specifier = resolve_url(specifier).expect("failed to create specifier"); diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs index aa69b0dafa..500a27ed2c 100644 --- a/cli/tests/integration/lsp_tests.rs +++ b/cli/tests/integration/lsp_tests.rs @@ -7418,6 +7418,49 @@ fn lsp_closed_file_find_references() { client.shutdown(); } +#[test] +fn lsp_closed_file_find_references_low_document_pre_load() { + let context = 
TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir(); + temp_dir.create_dir_all("sub_dir"); + temp_dir.write("./other_file.ts", "export const b = 5;"); + temp_dir.write("./sub_dir/mod.ts", "export const a = 5;"); + temp_dir.write( + "./sub_dir/mod.test.ts", + "import { a } from './mod.ts'; console.log(a);", + ); + let temp_dir_url = temp_dir.uri(); + let mut client = context.new_lsp_command().build(); + client.initialize(|builder| { + builder.set_preload_limit(1); + }); + client.did_open(json!({ + "textDocument": { + "uri": temp_dir_url.join("sub_dir/mod.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": r#"export const a = 5;"# + } + })); + let res = client.write_request( + "textDocument/references", + json!({ + "textDocument": { + "uri": temp_dir_url.join("sub_dir/mod.ts").unwrap(), + }, + "position": { "line": 0, "character": 13 }, + "context": { + "includeDeclaration": false + } + }), + ); + + // won't have results because the document won't be pre-loaded + assert_eq!(res, json!([])); + + client.shutdown(); +} + #[test] fn lsp_data_urls_with_jsx_compiler_option() { let context = TestContextBuilder::new().use_temp_cwd().build(); diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index 517fda1b73..e6fc7aa911 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -1014,9 +1014,6 @@ fn closed_file_pre_load_does_not_occur() { .new_command() .args_vec(["repl", "-A", "--log-level=debug"]) .with_pty(|console| { - assert_contains!( - console.all_output(), - "Skipping document preload for repl.", - ); + assert_contains!(console.all_output(), "Skipping document preload.",); }); } diff --git a/test_util/src/lsp.rs b/test_util/src/lsp.rs index 3e9d0a80bb..a7061543ff 100644 --- a/test_util/src/lsp.rs +++ b/test_util/src/lsp.rs @@ -378,6 +378,12 @@ impl InitializeParamsBuilder { self } + pub fn set_preload_limit(&mut self, arg: usize) -> &mut Self { 
+ let options = self.initialization_options_mut(); + options.insert("documentPreloadLimit".to_string(), arg.into()); + self + } + pub fn set_tls_certificate(&mut self, value: impl AsRef) -> &mut Self { let options = self.initialization_options_mut(); options.insert( From e0f07ab8cdd007afee14430264f486346ba8447e Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 11 May 2023 19:05:37 -0400 Subject: [PATCH 160/320] fix(ext/fs): add more context_path (#19101) --- ext/fs/ops.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 71526b217a..0fad92044c 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -1154,7 +1154,7 @@ where permissions.check_read(&path, "Deno.readFileSync()")?; let fs = state.borrow::(); - let buf = fs.read_file_sync(&path).context("readfile")?; + let buf = fs.read_file_sync(&path).context_path("readfile", &path)?; Ok(buf.into()) } @@ -1210,7 +1210,7 @@ where permissions.check_read(&path, "Deno.readFileSync()")?; let fs = state.borrow::(); - let buf = fs.read_file_sync(&path).context("readfile")?; + let buf = fs.read_file_sync(&path).context_path("readfile", &path)?; Ok(string_from_utf8_lossy(buf)) } From a39263e0670eca5816c7a72b57a51a7ef9bb8198 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Thu, 11 May 2023 21:53:09 -0400 Subject: [PATCH 161/320] fix(lsp): preload documents when `deno.documentPreloadLimit` changes (#19103) --- cli/lsp/documents.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 6577d27692..8fd9cdbb23 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -1170,11 +1170,13 @@ impl Documents { pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) { fn calculate_resolver_config_hash( enabled_urls: &[Url], + document_preload_limit: usize, maybe_import_map: Option<&import_map::ImportMap>, maybe_jsx_config: Option<&JsxImportSourceConfig>, maybe_package_json_deps: Option<&PackageJsonDeps>, ) -> u64 { 
let mut hasher = FastInsecureHasher::default(); + hasher.write_hashable(&document_preload_limit); hasher.write_hashable(&{ // ensure these are sorted so the hashing is deterministic let mut enabled_urls = enabled_urls.to_vec(); @@ -1215,6 +1217,7 @@ impl Documents { .and_then(|cf| cf.to_maybe_jsx_import_source_config()); let new_resolver_config_hash = calculate_resolver_config_hash( &options.enabled_urls, + options.document_preload_limit, options.maybe_import_map.as_deref(), maybe_jsx_config.as_ref(), maybe_package_json_deps.as_ref(), From dad7744f2c7492ceece67ed28a8e5651fba797b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 12 May 2023 04:09:32 +0200 Subject: [PATCH 162/320] chore: upgrade rusty_v8 to 0.71.1 (#19104) Fixes https://github.com/denoland/deno/issues/19021 --- .github/workflows/ci.generate.ts | 4 ++-- .github/workflows/ci.yml | 6 +++--- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 139c554d16..f5db4f499e 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + "25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "24-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "25-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6ccfbb2c51..b3b76647f7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: 
~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '24-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '25-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -583,7 +583,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '24-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index be34a16a2f..c06b9a0793 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5703,9 +5703,9 @@ dependencies = [ [[package]] name = "v8" -version = "0.71.0" +version = "0.71.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51a173a437bebab13d587a4aaf0a1e7a49433226538c9a78ca3b4ce3b8c6aeb6" +checksum = "32a2ece81e9f3d573376d5301b0d1c1c0ffcb63d57e6164ddf1bc844b4c8a23b" dependencies = [ "bitflags 1.3.2", "fslock", diff --git a/Cargo.toml b/Cargo.toml index 7bcb59a750..454bb35656 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.71.0", default-features = false } +v8 = { version = "0.71.1", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } 
deno_core = { version = "0.185.0", path = "./core" } From 7476ee34fadcefee630edbc564186155acebdf94 Mon Sep 17 00:00:00 2001 From: denobot <33910674+denobot@users.noreply.github.com> Date: Fri, 12 May 2023 16:47:27 +0200 Subject: [PATCH 163/320] chore: forward v1.33.3 release commit to main (#19111) **THIS PR HAS GIT CONFLICTS THAT MUST BE RESOLVED** This is the release commit being forwarded back to main for 1.33.3 Please ensure: - [x] Everything looks ok in the PR - [x] The release has been published To make edits to this PR: ```shell git fetch upstream forward_v1.33.3 && git checkout -b forward_v1.33.3 upstream/forward_v1.33.3 ``` Don't need this PR? Close it. cc @levex Co-authored-by: Levente Kurusa --- Cargo.lock | 52 ++++++++++++++++---------------- Cargo.toml | 50 +++++++++++++++--------------- Releases.md | 26 ++++++++++++++++ bench_util/Cargo.toml | 2 +- cli/Cargo.toml | 2 +- cli/deno_std.rs | 2 +- cli/napi/sym/Cargo.toml | 2 +- core/Cargo.toml | 2 +- ext/broadcast_channel/Cargo.toml | 2 +- ext/cache/Cargo.toml | 2 +- ext/console/Cargo.toml | 2 +- ext/crypto/Cargo.toml | 2 +- ext/fetch/Cargo.toml | 2 +- ext/ffi/Cargo.toml | 2 +- ext/fs/Cargo.toml | 2 +- ext/http/Cargo.toml | 2 +- ext/io/Cargo.toml | 2 +- ext/kv/Cargo.toml | 2 +- ext/napi/Cargo.toml | 2 +- ext/net/Cargo.toml | 2 +- ext/node/Cargo.toml | 2 +- ext/tls/Cargo.toml | 2 +- ext/url/Cargo.toml | 2 +- ext/web/Cargo.toml | 2 +- ext/webidl/Cargo.toml | 2 +- ext/websocket/Cargo.toml | 2 +- ext/webstorage/Cargo.toml | 2 +- ops/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- serde_v8/Cargo.toml | 2 +- 30 files changed, 104 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c06b9a0793..b2ef8559d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -696,7 +696,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5" [[package]] name = "deno" -version = "1.33.2" +version = "1.33.3" dependencies = [ "async-trait", "atty", @@ -819,7 +819,7 @@ dependencies = [ [[package]] 
name = "deno_bench_util" -version = "0.97.0" +version = "0.98.0" dependencies = [ "bencher", "deno_core", @@ -829,7 +829,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.97.0" +version = "0.98.0" dependencies = [ "async-trait", "deno_core", @@ -839,7 +839,7 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.35.0" +version = "0.36.0" dependencies = [ "async-trait", "deno_core", @@ -851,14 +851,14 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.103.0" +version = "0.104.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.185.0" +version = "0.186.0" dependencies = [ "anyhow", "bytes", @@ -883,7 +883,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.117.0" +version = "0.118.0" dependencies = [ "aes", "aes-gcm", @@ -951,7 +951,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.127.0" +version = "0.128.0" dependencies = [ "bytes", "data-url", @@ -968,7 +968,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.90.0" +version = "0.91.0" dependencies = [ "deno_core", "dlopen", @@ -983,7 +983,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.13.0" +version = "0.14.0" dependencies = [ "async-trait", "deno_core", @@ -1023,7 +1023,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.98.0" +version = "0.99.0" dependencies = [ "async-compression", "base64 0.13.1", @@ -1057,7 +1057,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.13.0" +version = "0.14.0" dependencies = [ "async-trait", "deno_core", @@ -1071,7 +1071,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.11.0" +version = "0.12.0" dependencies = [ "anyhow", "async-trait", @@ -1125,7 +1125,7 @@ dependencies = [ [[package]] name = "deno_napi" -version = "0.33.0" +version = "0.34.0" dependencies = [ "deno_core", "libloading", @@ -1133,7 +1133,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.95.0" +version = 
"0.96.0" dependencies = [ "deno_core", "deno_tls", @@ -1148,7 +1148,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.40.0" +version = "0.41.0" dependencies = [ "aes", "cbc", @@ -1218,7 +1218,7 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.63.0" +version = "0.64.0" dependencies = [ "lazy-regex", "once_cell", @@ -1236,7 +1236,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.111.0" +version = "0.112.0" dependencies = [ "atty", "console_static_text", @@ -1317,7 +1317,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.90.0" +version = "0.91.0" dependencies = [ "deno_core", "once_cell", @@ -1331,7 +1331,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.103.0" +version = "0.104.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1343,7 +1343,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.134.0" +version = "0.135.0" dependencies = [ "async-trait", "base64-simd", @@ -1361,7 +1361,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.103.0" +version = "0.104.0" dependencies = [ "deno_bench_util", "deno_core", @@ -1369,7 +1369,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.108.0" +version = "0.109.0" dependencies = [ "bytes", "deno_core", @@ -1385,7 +1385,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.98.0" +version = "0.99.0" dependencies = [ "deno_core", "deno_web", @@ -2987,7 +2987,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.33.0" +version = "0.34.0" dependencies = [ "proc-macro2 1.0.56", "quote 1.0.26", @@ -4256,7 +4256,7 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.96.0" +version = "0.97.0" dependencies = [ "bencher", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 454bb35656..fc31a15262 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,12 +44,12 @@ repository = "https://github.com/denoland/deno" v8 = { version = "0.71.1", default-features = false } 
deno_ast = { version = "0.26.0", features = ["transpiling"] } -deno_core = { version = "0.185.0", path = "./core" } -deno_ops = { version = "0.63.0", path = "./ops" } -serde_v8 = { version = "0.96.0", path = "./serde_v8" } -deno_runtime = { version = "0.111.0", path = "./runtime" } -napi_sym = { version = "0.33.0", path = "./cli/napi/sym" } -deno_bench_util = { version = "0.97.0", path = "./bench_util" } +deno_core = { version = "0.186.0", path = "./core" } +deno_ops = { version = "0.64.0", path = "./ops" } +serde_v8 = { version = "0.97.0", path = "./serde_v8" } +deno_runtime = { version = "0.112.0", path = "./runtime" } +napi_sym = { version = "0.34.0", path = "./cli/napi/sym" } +deno_bench_util = { version = "0.98.0", path = "./bench_util" } test_util = { path = "./test_util" } deno_lockfile = "0.14.0" deno_media_type = { version = "0.1.0", features = ["module_specifier"] } @@ -57,25 +57,25 @@ deno_npm = "0.3.0" deno_semver = "0.2.1" # exts -deno_broadcast_channel = { version = "0.97.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.35.0", path = "./ext/cache" } -deno_console = { version = "0.103.0", path = "./ext/console" } -deno_crypto = { version = "0.117.0", path = "./ext/crypto" } -deno_fetch = { version = "0.127.0", path = "./ext/fetch" } -deno_ffi = { version = "0.90.0", path = "./ext/ffi" } -deno_fs = { version = "0.13.0", path = "./ext/fs" } -deno_http = { version = "0.98.0", path = "./ext/http" } -deno_io = { version = "0.13.0", path = "./ext/io" } -deno_net = { version = "0.95.0", path = "./ext/net" } -deno_node = { version = "0.40.0", path = "./ext/node" } -deno_kv = { version = "0.11.0", path = "./ext/kv" } -deno_tls = { version = "0.90.0", path = "./ext/tls" } -deno_url = { version = "0.103.0", path = "./ext/url" } -deno_web = { version = "0.134.0", path = "./ext/web" } -deno_webidl = { version = "0.103.0", path = "./ext/webidl" } -deno_websocket = { version = "0.108.0", path = "./ext/websocket" } -deno_webstorage = { version = 
"0.98.0", path = "./ext/webstorage" } -deno_napi = { version = "0.33.0", path = "./ext/napi" } +deno_broadcast_channel = { version = "0.98.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.36.0", path = "./ext/cache" } +deno_console = { version = "0.104.0", path = "./ext/console" } +deno_crypto = { version = "0.118.0", path = "./ext/crypto" } +deno_fetch = { version = "0.128.0", path = "./ext/fetch" } +deno_ffi = { version = "0.91.0", path = "./ext/ffi" } +deno_fs = { version = "0.14.0", path = "./ext/fs" } +deno_http = { version = "0.99.0", path = "./ext/http" } +deno_io = { version = "0.14.0", path = "./ext/io" } +deno_net = { version = "0.96.0", path = "./ext/net" } +deno_node = { version = "0.41.0", path = "./ext/node" } +deno_kv = { version = "0.12.0", path = "./ext/kv" } +deno_tls = { version = "0.91.0", path = "./ext/tls" } +deno_url = { version = "0.104.0", path = "./ext/url" } +deno_web = { version = "0.135.0", path = "./ext/web" } +deno_webidl = { version = "0.104.0", path = "./ext/webidl" } +deno_websocket = { version = "0.109.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.99.0", path = "./ext/webstorage" } +deno_napi = { version = "0.34.0", path = "./ext/napi" } aes = "=0.8.2" anyhow = "1.0.57" diff --git a/Releases.md b/Releases.md index 666e7b2a15..a84cc229e3 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,32 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.33.3 / 2023.05.12 + +- feat(compile): unstable npm and node specifier support (#19005) +- feat(ext/http): Automatic compression for Deno.serve (#19031) +- feat(lsp): ability to configure document pre-load limit (#19097) +- feat(node): add `Module.runMain()` (#19080) +- fix(cli): upgrade to Typescript 5.0.4 (#19090) +- fix(console): handle error when inspecting promise-like (#19083) +- fix(core): always report the first error on unhandled rejection (#18992) +- fix(core): 
let V8 drive extension ESM loads (#18997) +- fix(dts): align `seekSync` `position` arg with `seek` (#19077) +- fix(ext/ffi): Callbacks panic on returning isize (#19022) +- fix(ext/ffi): UnsafeCallback can hang with 'deno test' (#19018) +- fix(ext/fs): add more context_path (#19101) +- fix(ext/http): Ensure Deno.serve works across --watch restarts (#18998) +- fix(lsp): hard to soft error when unable to get completion info (#19091) +- fix(lsp): preload documents when `deno.documentPreloadLimit` changes (#19103) +- fix(node): conditional exports edge case (#19082) +- fix(node): expose channels in worker_threads (#19086) +- fix(npm): make http2 module available, make 'nodeGlobalThisName' writable + (#19092) +- fix(runtime): `ChildProcess::kill()` doesn't require additional perms (#15339) +- fix(vendor): better handling of redirects (#19063) +- perf(ext/ffi): Use `Box<[NativeType]>` in CallbackInfo parameters (#19032) +- perf(fmt): faster formatting for minified object literals (#19050) + ### 1.33.2 / 2023.05.04 - fix(core): Use primordials for methods (#18839) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index aac3103064..b2b3df4544 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.97.0" +version = "0.98.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index b415f53d8e..027a4f3ad5 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.33.2" +version = "1.33.3" authors.workspace = true default-run = "deno" edition.workspace = true diff --git a/cli/deno_std.rs b/cli/deno_std.rs index 8f11e9624d..3cfbba101a 100644 --- a/cli/deno_std.rs +++ b/cli/deno_std.rs @@ -2,4 +2,4 @@ // WARNING: Ensure this is the only deno_std version reference as this // is automatically updated by the version bump workflow. 
-pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.186.0/"; +pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.187.0/"; diff --git a/cli/napi/sym/Cargo.toml b/cli/napi/sym/Cargo.toml index ed00bd4a8b..6dacbd166e 100644 --- a/cli/napi/sym/Cargo.toml +++ b/cli/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.33.0" +version = "0.34.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/core/Cargo.toml b/core/Cargo.toml index 0bdac5703f..c0854fdb66 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_core" -version = "0.185.0" +version = "0.186.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 199f816881..2e05a532a2 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.97.0" +version = "0.98.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 6b3385966c..48211ecc60 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.35.0" +version = "0.36.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index b2340d10d3..42c72ac979 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.103.0" +version = "0.104.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 71ed46976c..cfd1f1daad 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.117.0" +version = "0.118.0" 
authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index a215febac4..36682805bd 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.127.0" +version = "0.128.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 69e3a5e42c..8e856b5899 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.90.0" +version = "0.91.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index 67c59a4b48..bbeb6896cd 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.13.0" +version = "0.14.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index c1de811705..8bf1d42e2b 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.98.0" +version = "0.99.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index fc9de711ff..3a63efe38d 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.13.0" +version = "0.14.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index fd36ee536d..5551e64cc6 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.11.0" +version = "0.12.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index c427be25c0..e0c1206925 100644 --- a/ext/napi/Cargo.toml +++ 
b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.33.0" +version = "0.34.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index 87c3cba567..85c2f84d0d 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_net" -version = "0.95.0" +version = "0.96.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 6a897a9a18..00d36107f7 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.40.0" +version = "0.41.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 49fb2aae3f..1326a2f394 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.90.0" +version = "0.91.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index f5e8815077..a8bff4af46 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.103.0" +version = "0.104.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index ba58f13c54..9ac7217f13 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.134.0" +version = "0.135.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml index bda6aeeb1a..62af48a2c1 100644 --- a/ext/webidl/Cargo.toml +++ b/ext/webidl/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webidl" -version = "0.103.0" +version = "0.104.0" authors.workspace = true edition.workspace = true license.workspace = true 
diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index ce6891f0f0..83d3733de1 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_websocket" -version = "0.108.0" +version = "0.109.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 3bffacab54..2e7e01598b 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_webstorage" -version = "0.98.0" +version = "0.99.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ops/Cargo.toml b/ops/Cargo.toml index efeefbcd0a..4e7eecf9e7 100644 --- a/ops/Cargo.toml +++ b/ops/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ops" -version = "0.63.0" +version = "0.64.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 9f9c65af1d..4c5dc8be05 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.111.0" +version = "0.112.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/serde_v8/Cargo.toml b/serde_v8/Cargo.toml index 60ffc40e66..75288dc89e 100644 --- a/serde_v8/Cargo.toml +++ b/serde_v8/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "serde_v8" -version = "0.96.0" +version = "0.97.0" authors.workspace = true edition.workspace = true license.workspace = true From 68c0fcb157bb47bbf58bcdcecf59d237fb84f201 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 12 May 2023 19:07:40 -0400 Subject: [PATCH 164/320] refactor(lsp): make `RequestMethod` private (#19114) --- cli/lsp/code_lens.rs | 21 +- cli/lsp/diagnostics.rs | 6 +- cli/lsp/language_server.rs | 525 ++++++++++++++----------------------- cli/lsp/tsc.rs | 443 ++++++++++++++++++++++++++++--- cli/npm/cache.rs | 15 -- 5 files changed, 628 
insertions(+), 382 deletions(-) diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index fd7f350061..c451e30bdc 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -230,13 +230,14 @@ async fn resolve_implementation_code_lens( ) -> Result { let asset_or_doc = language_server.get_asset_or_document(&data.specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetImplementation(( - data.specifier.clone(), - line_index.offset_tsc(code_lens.range.start)?, - )); - let snapshot = language_server.snapshot(); - let maybe_implementations: Option> = - language_server.ts_server.request(snapshot, req).await?; + let maybe_implementations = language_server + .ts_server + .get_implementations( + language_server.snapshot(), + data.specifier.clone(), + line_index.offset_tsc(code_lens.range.start)?, + ) + .await?; if let Some(implementations) = maybe_implementations { let mut locations = Vec::new(); for implementation in implementations { @@ -325,12 +326,12 @@ async fn resolve_references_code_lens( let asset_or_document = language_server.get_asset_or_document(&data.specifier)?; let line_index = asset_or_document.line_index(); - let snapshot = language_server.snapshot(); + let maybe_referenced_symbols = language_server .ts_server .find_references( - snapshot, - &data.specifier, + language_server.snapshot(), + data.specifier.clone(), line_index.offset_tsc(code_lens.range.start)?, ) .await?; diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 7d13cfdb5f..0f96a498bd 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -50,7 +50,6 @@ pub type DiagnosticRecord = pub type DiagnosticVec = Vec; type DiagnosticMap = HashMap, Vec)>; -type TsDiagnosticsMap = HashMap>; type DiagnosticsByVersionMap = HashMap, Vec>; #[derive(Clone)] @@ -539,10 +538,9 @@ async fn generate_ts_diagnostics( let (enabled_specifiers, disabled_specifiers) = specifiers .into_iter() .partition::, _>(|s| config.specifier_enabled(s)); - let 
ts_diagnostics_map: TsDiagnosticsMap = if !enabled_specifiers.is_empty() { - let req = tsc::RequestMethod::GetDiagnostics(enabled_specifiers); + let ts_diagnostics_map = if !enabled_specifiers.is_empty() { ts_server - .request_with_cancellation(snapshot.clone(), req, token) + .get_diagnostics(snapshot.clone(), enabled_specifiers, token) .await? } else { Default::default() diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 7fe986bfee..de5cd6f09c 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -578,10 +578,7 @@ impl Inner { } else { let navigation_tree: tsc::NavigationTree = self .ts_server - .request( - self.snapshot(), - tsc::RequestMethod::GetNavigationTree(specifier.clone()), - ) + .get_navigation_tree(self.snapshot(), specifier.clone()) .await?; let navigation_tree = Arc::new(navigation_tree); match asset_or_doc { @@ -1051,10 +1048,7 @@ impl Inner { if let Err(err) = self.merge_user_tsconfig(&mut tsconfig) { self.client.show_message(MessageType::WARNING, err); } - let _ok: bool = self - .ts_server - .request(self.snapshot(), tsc::RequestMethod::Configure(tsconfig)) - .await?; + let _ok = self.ts_server.configure(self.snapshot(), tsconfig).await?; self.performance.measure(mark); Ok(()) } @@ -1142,14 +1136,10 @@ impl Inner { } if capabilities.code_action_provider.is_some() { - let fixable_diagnostics: Vec = self + let fixable_diagnostics = self .ts_server - .request(self.snapshot(), tsc::RequestMethod::GetSupportedCodeFixes) - .await - .map_err(|err| { - error!("Unable to get fixable diagnostics: {}", err); - LspError::internal_error() - })?; + .get_supported_code_fixes(self.snapshot()) + .await?; self.ts_fixable_diagnostics = fixable_diagnostics; } @@ -1383,7 +1373,7 @@ impl Inner { self.refresh_documents_config(); self.refresh_npm_specifiers().await; self.diagnostics_server.invalidate_all(); - self.restart_ts_server().await; + self.ts_server.restart(self.snapshot()).await; self.send_diagnostics_update(); 
self.send_testing_update(); } @@ -1594,18 +1584,12 @@ impl Inner { }) } else { let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetQuickInfo(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - )); - let maybe_quick_info: Option = self + let position = + line_index.offset_tsc(params.text_document_position_params.position)?; + let maybe_quick_info = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Unable to get quick info: {}", err); - LspError::internal_error() - })?; + .get_quick_info(self.snapshot(), specifier.clone(), position) + .await?; maybe_quick_info.map(|qi| qi.to_hover(line_index, self)) }; self.performance.measure(mark); @@ -1666,24 +1650,16 @@ impl Inner { NumberOrString::Number(code) => code.to_string(), }; let codes = vec![code]; - let req = tsc::RequestMethod::GetCodeFixes(( - specifier.clone(), - line_index.offset_tsc(diagnostic.range.start)?, - line_index.offset_tsc(diagnostic.range.end)?, - codes, - )); - let actions: Vec = - match self.ts_server.request(self.snapshot(), req).await { - Ok(items) => items, - Err(err) => { - // sometimes tsc reports errors when retrieving code actions - // because they don't reflect the current state of the document - // so we will log them to the output, but we won't send an error - // message back to the client. - error!("Error getting actions from TypeScript: {}", err); - Vec::new() - } - }; + let actions = self + .ts_server + .get_code_fixes( + self.snapshot(), + specifier.clone(), + line_index.offset_tsc(diagnostic.range.start)? + ..line_index.offset_tsc(diagnostic.range.end)?, + codes, + ) + .await; for action in actions { code_actions .add_ts_fix_action(&specifier, &action, diagnostic, self) @@ -1726,27 +1702,22 @@ impl Inner { } // Refactor - let start = line_index.offset_tsc(params.range.start)?; - let length = line_index.offset_tsc(params.range.end)? 
- start; let only = params .context .only .as_ref() .and_then(|values| values.first().map(|v| v.as_str().to_owned())) .unwrap_or_default(); - let req = tsc::RequestMethod::GetApplicableRefactors(( - specifier.clone(), - tsc::TextSpan { start, length }, - only, - )); - let refactor_infos: Vec = self + let refactor_infos = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_applicable_refactors( + self.snapshot(), + specifier.clone(), + line_index.offset_tsc(params.range.start)? + ..line_index.offset_tsc(params.range.end)?, + only, + ) + .await?; let mut refactor_actions = Vec::::new(); for refactor_info in refactor_infos.iter() { refactor_actions @@ -1788,24 +1759,15 @@ impl Inner { let result = if kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str()) { - let snapshot = self.snapshot(); let code_action_data: CodeActionData = from_value(data).map_err(|err| { error!("Unable to decode code action data: {}", err); LspError::invalid_params("The CodeAction's data is invalid.") })?; - let req = tsc::RequestMethod::GetCombinedCodeFix(( - code_action_data.specifier.clone(), - json!(code_action_data.fix_id.clone()), - )); - let combined_code_actions: tsc::CombinedCodeActions = self + let combined_code_actions = self .ts_server - .request(snapshot.clone(), req) - .await - .map_err(|err| { - error!("Unable to get combined fix from TypeScript: {}", err); - LspError::internal_error() - })?; + .get_combined_code_fix(self.snapshot(), &code_action_data) + .await?; if combined_code_actions.commands.is_some() { error!("Deno does not support code actions with commands."); return Err(LspError::invalid_request()); @@ -1831,7 +1793,6 @@ impl Inner { })?; code_action } else if kind.as_str().starts_with(CodeActionKind::REFACTOR.as_str()) { - let snapshot = self.snapshot(); let mut code_action = params; let action_data: refactor::RefactorCodeActionData = 
from_value(data) .map_err(|err| { @@ -1840,19 +1801,17 @@ impl Inner { })?; let asset_or_doc = self.get_asset_or_document(&action_data.specifier)?; let line_index = asset_or_doc.line_index(); - let start = line_index.offset_tsc(action_data.range.start)?; - let length = line_index.offset_tsc(action_data.range.end)? - start; - let req = tsc::RequestMethod::GetEditsForRefactor(( - action_data.specifier, - tsc::TextSpan { start, length }, - action_data.refactor_name, - action_data.action_name, - )); - let refactor_edit_info: tsc::RefactorEditInfo = - self.ts_server.request(snapshot, req).await.map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + let refactor_edit_info = self + .ts_server + .get_edits_for_refactor( + self.snapshot(), + action_data.specifier, + line_index.offset_tsc(action_data.range.start)? + ..line_index.offset_tsc(action_data.range.end)?, + action_data.refactor_name, + action_data.action_name, + ) + .await?; code_action.edit = refactor_edit_info .to_workspace_edit(self) .await @@ -1950,19 +1909,15 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); let files_to_search = vec![specifier.clone()]; - let req = tsc::RequestMethod::GetDocumentHighlights(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - files_to_search, - )); - let maybe_document_highlights: Option> = self + let maybe_document_highlights = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Unable to get document highlights from TypeScript: {}", err); - LspError::internal_error() - })?; + .get_document_highlights( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + files_to_search, + ) + .await?; if let Some(document_highlights) = maybe_document_highlights { let result = document_highlights @@ -1998,7 +1953,7 @@ impl Inner { .ts_server 
.find_references( self.snapshot(), - &specifier, + specifier.clone(), line_index.offset_tsc(params.text_document_position.position)?, ) .await?; @@ -2050,18 +2005,14 @@ impl Inner { let mark = self.performance.mark("goto_definition", Some(¶ms)); let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetDefinition(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - )); - let maybe_definition: Option = self + let maybe_definition = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Unable to get definition from TypeScript: {}", err); - LspError::internal_error() - })?; + .get_definition( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + ) + .await?; if let Some(definition) = maybe_definition { let results = definition.to_definition(line_index, self).await; @@ -2090,19 +2041,14 @@ impl Inner { let mark = self.performance.mark("goto_definition", Some(¶ms)); let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetTypeDefinition { - specifier, - position: line_index - .offset_tsc(params.text_document_position_params.position)?, - }; - let maybe_definition_info: Option> = self + let maybe_definition_info = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Unable to get type definition from TypeScript: {}", err); - LspError::internal_error() - })?; + .get_type_definition( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + ) + .await?; let response = if let Some(definition_info) = maybe_definition_info { let mut location_links = Vec::new(); @@ -2167,48 +2113,47 @@ impl Inner { let position = line_index.offset_tsc(params.text_document_position.position)?; let use_snippets = 
self.config.client_capabilities.snippet_support; - let req = tsc::RequestMethod::GetCompletions(( - specifier.clone(), - position, - tsc::GetCompletionsAtPositionOptions { - user_preferences: tsc::UserPreferences { - allow_incomplete_completions: Some(true), - allow_text_changes_in_new_files: Some(specifier.scheme() == "file"), - import_module_specifier_ending: Some( - tsc::ImportModuleSpecifierEnding::Index, - ), - include_automatic_optional_chain_completions: Some(true), - include_completions_for_import_statements: Some( - self.config.workspace_settings().suggest.auto_imports, - ), - include_completions_for_module_exports: Some(true), - include_completions_with_object_literal_method_snippets: Some( - use_snippets, - ), - include_completions_with_class_member_snippets: Some(use_snippets), - include_completions_with_insert_text: Some(true), - include_completions_with_snippet_text: Some(use_snippets), - jsx_attribute_completion_style: Some( - tsc::JsxAttributeCompletionStyle::Auto, - ), - provide_prefix_and_suffix_text_for_rename: Some(true), - provide_refactor_not_applicable_reason: Some(true), - use_label_details_in_completion_entries: Some(true), - ..Default::default() + let maybe_completion_info = self + .ts_server + .get_completions( + self.snapshot(), + specifier.clone(), + position, + tsc::GetCompletionsAtPositionOptions { + user_preferences: tsc::UserPreferences { + allow_incomplete_completions: Some(true), + allow_text_changes_in_new_files: Some( + specifier.scheme() == "file", + ), + import_module_specifier_ending: Some( + tsc::ImportModuleSpecifierEnding::Index, + ), + include_automatic_optional_chain_completions: Some(true), + include_completions_for_import_statements: Some( + self.config.workspace_settings().suggest.auto_imports, + ), + include_completions_for_module_exports: Some(true), + include_completions_with_object_literal_method_snippets: Some( + use_snippets, + ), + include_completions_with_class_member_snippets: Some( + use_snippets, + ), + 
include_completions_with_insert_text: Some(true), + include_completions_with_snippet_text: Some(use_snippets), + jsx_attribute_completion_style: Some( + tsc::JsxAttributeCompletionStyle::Auto, + ), + provide_prefix_and_suffix_text_for_rename: Some(true), + provide_refactor_not_applicable_reason: Some(true), + use_label_details_in_completion_entries: Some(true), + ..Default::default() + }, + trigger_character, + trigger_kind, }, - trigger_character, - trigger_kind, - }, - )); - let snapshot = self.snapshot(); - let maybe_completion_info: Option = - match self.ts_server.request(snapshot, req).await { - Ok(maybe_info) => maybe_info, - Err(err) => { - error!("Unable to get completion info from TypeScript: {:#}", err); - None - } - }; + ) + .await; if let Some(completions) = maybe_completion_info { let results = completions.as_completion_response( @@ -2241,9 +2186,10 @@ impl Inner { })?; if let Some(data) = &data.tsc { let specifier = &data.specifier; - let req = tsc::RequestMethod::GetCompletionDetails(data.into()); - let result: Result, _> = - self.ts_server.request(self.snapshot(), req).await; + let result = self + .ts_server + .get_completion_details(self.snapshot(), data.into()) + .await; match result { Ok(maybe_completion_info) => { if let Some(completion_info) = maybe_completion_info { @@ -2302,18 +2248,14 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetImplementation(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - )); - let maybe_implementations: Option> = self + let maybe_implementations = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_implementations( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + ) + .await?; let result = if let 
Some(implementations) = maybe_implementations { let mut links = Vec::new(); @@ -2347,15 +2289,10 @@ impl Inner { let mark = self.performance.mark("folding_range", Some(¶ms)); let asset_or_doc = self.get_asset_or_document(&specifier)?; - let req = tsc::RequestMethod::GetOutliningSpans(specifier); - let outlining_spans: Vec = self + let outlining_spans = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_outlining_spans(self.snapshot(), specifier) + .await?; let response = if !outlining_spans.is_empty() { Some( @@ -2394,18 +2331,14 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::ProvideCallHierarchyIncomingCalls(( - specifier, - line_index.offset_tsc(params.item.selection_range.start)?, - )); let incoming_calls: Vec = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .provide_call_hierarchy_incoming_calls( + self.snapshot(), + specifier, + line_index.offset_tsc(params.item.selection_range.start)?, + ) + .await?; let maybe_root_path_owned = self .config @@ -2442,18 +2375,14 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::ProvideCallHierarchyOutgoingCalls(( - specifier, - line_index.offset_tsc(params.item.selection_range.start)?, - )); let outgoing_calls: Vec = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .provide_call_hierarchy_outgoing_calls( + self.snapshot(), + specifier, + line_index.offset_tsc(params.item.selection_range.start)?, + ) + .await?; let maybe_root_path_owned = self .config @@ -2494,19 +2423,14 
@@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::PrepareCallHierarchy(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - )); - let maybe_one_or_many: Option> = - self - .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + let maybe_one_or_many = self + .ts_server + .prepare_call_hierarchy( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + ) + .await?; let response = if let Some(one_or_many) = maybe_one_or_many { let maybe_root_path_owned = self @@ -2561,23 +2485,14 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::FindRenameLocations { - specifier, - position: line_index - .offset_tsc(params.text_document_position.position)?, - find_in_strings: false, - find_in_comments: false, - provide_prefix_and_suffix_text_for_rename: false, - }; - - let maybe_locations: Option> = self + let maybe_locations = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .find_rename_locations( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position.position)?, + ) + .await?; if let Some(locations) = maybe_locations { let rename_locations = tsc::RenameLocations { locations }; @@ -2615,19 +2530,14 @@ impl Inner { let mut selection_ranges = Vec::::new(); for position in params.positions { - let req = tsc::RequestMethod::GetSmartSelectionRange(( - specifier.clone(), - line_index.offset_tsc(position)?, - )); - let selection_range: tsc::SelectionRange = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to 
request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_smart_selection_range( + self.snapshot(), + specifier.clone(), + line_index.offset_tsc(position)?, + ) + .await?; selection_ranges .push(selection_range.to_selection_range(line_index.clone())); @@ -2653,21 +2563,14 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let req = tsc::RequestMethod::GetEncodedSemanticClassifications(( - specifier, - tsc::TextSpan { - start: 0, - length: line_index.text_content_length_utf16().into(), - }, - )); - let semantic_classification: tsc::Classifications = self + let semantic_classification = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_encoded_semantic_classifications( + self.snapshot(), + specifier, + 0..line_index.text_content_length_utf16().into(), + ) + .await?; let semantic_tokens = semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?; @@ -2699,20 +2602,15 @@ impl Inner { let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let start = line_index.offset_tsc(params.range.start)?; - let length = line_index.offset_tsc(params.range.end)? - start; - let req = tsc::RequestMethod::GetEncodedSemanticClassifications(( - specifier, - tsc::TextSpan { start, length }, - )); - let semantic_classification: tsc::Classifications = self + let semantic_classification = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver {}", err); - LspError::invalid_request() - })?; + .get_encoded_semantic_classifications( + self.snapshot(), + specifier, + line_index.offset_tsc(params.range.start)? 
+ ..line_index.offset_tsc(params.range.end)?, + ) + .await?; let semantic_tokens = semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?; @@ -2754,19 +2652,15 @@ impl Inner { trigger_reason: None, } }; - let req = tsc::RequestMethod::GetSignatureHelpItems(( - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - options, - )); let maybe_signature_help_items: Option = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed to request to tsserver: {}", err); - LspError::invalid_request() - })?; + .get_signature_help_items( + self.snapshot(), + specifier, + line_index.offset_tsc(params.text_document_position_params.position)?, + options, + ) + .await?; if let Some(signature_help_items) = maybe_signature_help_items { let signature_help = signature_help_items.into_signature_help(self); @@ -2784,21 +2678,18 @@ impl Inner { ) -> LspResult>> { let mark = self.performance.mark("symbol", Some(¶ms)); - let req = tsc::RequestMethod::GetNavigateToItems { - search: params.query, - // this matches vscode's hard coded result count - max_result_count: Some(256), - file: None, - }; - - let navigate_to_items: Vec = self + let navigate_to_items = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Failed request to tsserver: {}", err); - LspError::invalid_request() - })?; + .get_navigate_to_items( + self.snapshot(), + tsc::GetNavigateToItemsArgs { + search: params.query, + // this matches vscode's hard coded result count + max_result_count: Some(256), + file: None, + }, + ) + .await?; let maybe_symbol_information = if navigate_to_items.is_empty() { None @@ -3287,21 +3178,13 @@ impl Inner { // the language server for TypeScript (as it might hold to some stale // documents). 
self.diagnostics_server.invalidate_all(); - self.restart_ts_server().await; + self.ts_server.restart(self.snapshot()).await; self.send_diagnostics_update(); self.send_testing_update(); self.performance.measure(mark); } - async fn restart_ts_server(&self) { - let _: bool = self - .ts_server - .request(self.snapshot(), tsc::RequestMethod::Restart) - .await - .unwrap(); - } - fn get_performance(&self) -> Value { let averages = self.performance.averages(); json!({ "averages": averages }) @@ -3334,24 +3217,22 @@ impl Inner { let mark = self.performance.mark("inlay_hint", Some(¶ms)); let asset_or_doc = self.get_asset_or_document(&specifier)?; let line_index = asset_or_doc.line_index(); - let range = tsc::TextSpan::from_range(¶ms.range, line_index.clone()) - .map_err(|err| { - error!("Failed to convert range to text_span: {}", err); - LspError::internal_error() - })?; - let req = tsc::RequestMethod::ProvideInlayHints(( - specifier, - range, - workspace_settings.into(), - )); - let maybe_inlay_hints: Option> = self + let text_span = + tsc::TextSpan::from_range(¶ms.range, line_index.clone()).map_err( + |err| { + error!("Failed to convert range to text_span: {}", err); + LspError::internal_error() + }, + )?; + let maybe_inlay_hints = self .ts_server - .request(self.snapshot(), req) - .await - .map_err(|err| { - error!("Unable to get inlay hints: {}", err); - LspError::internal_error() - })?; + .provide_inlay_hints( + self.snapshot(), + specifier, + text_span, + workspace_settings.into(), + ) + .await?; let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { hints .iter() diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 92407bec1a..bfbb5cf9ac 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use super::analysis::CodeActionData; use super::code_lens; use super::config; use super::documents::AssetOrDocument; @@ -53,6 +54,7 @@ use serde_repr::Serialize_repr; use std::cmp; use std::collections::HashMap; use std::collections::HashSet; +use std::ops::Range; use std::path::Path; use std::sync::Arc; use std::thread; @@ -118,7 +120,403 @@ impl TsServer { Self(tx) } - pub async fn request( + pub async fn get_diagnostics( + &self, + snapshot: Arc, + specifiers: Vec, + token: CancellationToken, + ) -> Result>, AnyError> { + let req = RequestMethod::GetDiagnostics(specifiers); + self.request_with_cancellation(snapshot, req, token).await + } + + pub async fn find_references( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result>, LspError> { + let req = RequestMethod::FindReferences { + specifier, + position, + }; + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get references from TypeScript: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_navigation_tree( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + ) -> Result { + self + .request(snapshot, RequestMethod::GetNavigationTree(specifier)) + .await + } + + pub async fn configure( + &self, + snapshot: Arc, + tsconfig: TsConfig, + ) -> Result { + self + .request(snapshot, RequestMethod::Configure(tsconfig)) + .await + } + + pub async fn get_supported_code_fixes( + &self, + snapshot: Arc, + ) -> Result, LspError> { + self + .request(snapshot, RequestMethod::GetSupportedCodeFixes) + .await + .map_err(|err| { + log::error!("Unable to get fixable diagnostics: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_quick_info( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result, LspError> { + let req = RequestMethod::GetQuickInfo((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get quick info: {}", err); + 
LspError::internal_error() + }) + } + + pub async fn get_code_fixes( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + range: Range, + codes: Vec, + ) -> Vec { + let req = + RequestMethod::GetCodeFixes((specifier, range.start, range.end, codes)); + match self.request(snapshot, req).await { + Ok(items) => items, + Err(err) => { + // sometimes tsc reports errors when retrieving code actions + // because they don't reflect the current state of the document + // so we will log them to the output, but we won't send an error + // message back to the client. + log::error!("Error getting actions from TypeScript: {}", err); + Vec::new() + } + } + } + + pub async fn get_applicable_refactors( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + range: Range, + only: String, + ) -> Result, LspError> { + let req = RequestMethod::GetApplicableRefactors(( + specifier.clone(), + TextSpan { + start: range.start, + length: range.end - range.start, + }, + only, + )); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_combined_code_fix( + &self, + snapshot: Arc, + code_action_data: &CodeActionData, + ) -> Result { + let req = RequestMethod::GetCombinedCodeFix(( + code_action_data.specifier.clone(), + json!(code_action_data.fix_id.clone()), + )); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get combined fix from TypeScript: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_edits_for_refactor( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + range: Range, + refactor_name: String, + action_name: String, + ) -> Result { + let req = RequestMethod::GetEditsForRefactor(( + specifier, + TextSpan { + start: range.start, + length: range.end - range.start, + }, + refactor_name, + action_name, + )); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + 
LspError::invalid_request() + }) + } + + pub async fn get_document_highlights( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + files_to_search: Vec, + ) -> Result>, LspError> { + let req = RequestMethod::GetDocumentHighlights(( + specifier, + position, + files_to_search, + )); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get document highlights from TypeScript: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_definition( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result, LspError> { + let req = RequestMethod::GetDefinition((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get definition from TypeScript: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_type_definition( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result>, LspError> { + let req = RequestMethod::GetTypeDefinition { + specifier, + position, + }; + self.request(snapshot, req).await.map_err(|err| { + log::error!("Unable to get type definition from TypeScript: {}", err); + LspError::internal_error() + }) + } + + pub async fn get_completions( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + options: GetCompletionsAtPositionOptions, + ) -> Option { + let req = RequestMethod::GetCompletions((specifier, position, options)); + match self.request(snapshot, req).await { + Ok(maybe_info) => maybe_info, + Err(err) => { + log::error!("Unable to get completion info from TypeScript: {:#}", err); + None + } + } + } + + pub async fn get_completion_details( + &self, + snapshot: Arc, + args: GetCompletionDetailsArgs, + ) -> Result, AnyError> { + let req = RequestMethod::GetCompletionDetails(args); + self.request(snapshot, req).await + } + + pub async fn get_implementations( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result>, LspError> { + 
let req = RequestMethod::GetImplementation((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_outlining_spans( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + ) -> Result, LspError> { + let req = RequestMethod::GetOutliningSpans(specifier); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn provide_call_hierarchy_incoming_calls( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result, LspError> { + let req = + RequestMethod::ProvideCallHierarchyIncomingCalls((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn provide_call_hierarchy_outgoing_calls( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result, LspError> { + let req = + RequestMethod::ProvideCallHierarchyOutgoingCalls((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn prepare_call_hierarchy( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result>, LspError> { + let req = RequestMethod::PrepareCallHierarchy((specifier, position)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn find_rename_locations( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result>, LspError> { + let req = RequestMethod::FindRenameLocations { + specifier, + position, + find_in_strings: false, + find_in_comments: false, + provide_prefix_and_suffix_text_for_rename: false, 
+ }; + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_smart_selection_range( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + ) -> Result { + let req = RequestMethod::GetSmartSelectionRange((specifier, position)); + + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_encoded_semantic_classifications( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + range: Range, + ) -> Result { + let req = RequestMethod::GetEncodedSemanticClassifications(( + specifier, + TextSpan { + start: range.start, + length: range.end - range.start, + }, + )); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_signature_help_items( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + position: u32, + options: SignatureHelpItemsOptions, + ) -> Result, LspError> { + let req = + RequestMethod::GetSignatureHelpItems((specifier, position, options)); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed to request to tsserver: {}", err); + LspError::invalid_request() + }) + } + + pub async fn get_navigate_to_items( + &self, + snapshot: Arc, + args: GetNavigateToItemsArgs, + ) -> Result, LspError> { + let req = RequestMethod::GetNavigateToItems(args); + self.request(snapshot, req).await.map_err(|err| { + log::error!("Failed request to tsserver: {}", err); + LspError::invalid_request() + }) + } + + pub async fn provide_inlay_hints( + &self, + snapshot: Arc, + specifier: ModuleSpecifier, + text_span: TextSpan, + user_preferences: UserPreferences, + ) -> Result>, LspError> { + let req = RequestMethod::ProvideInlayHints(( + specifier, + text_span, + user_preferences, + )); + self.request(snapshot, 
req).await.map_err(|err| { + log::error!("Unable to get inlay hints: {}", err); + LspError::internal_error() + }) + } + + pub async fn restart(&self, snapshot: Arc) { + let _: bool = self + .request(snapshot, RequestMethod::Restart) + .await + .unwrap(); + } + + async fn request( &self, snapshot: Arc, req: RequestMethod, @@ -131,7 +529,7 @@ impl TsServer { .await } - pub async fn request_with_cancellation( + async fn request_with_cancellation( &self, snapshot: Arc, req: RequestMethod, @@ -147,26 +545,6 @@ impl TsServer { let value = rx.await??; Ok(serde_json::from_value::(value)?) } - - // todo(dsherret): refactor the rest of the request methods to have - // methods to call on this struct, then make `RequestMethod` and - // friends internal - - pub async fn find_references( - &self, - snapshot: Arc, - specifier: &ModuleSpecifier, - position: u32, - ) -> Result>, LspError> { - let req = RequestMethod::FindReferences { - specifier: specifier.clone(), - position, - }; - self.request(snapshot, req).await.map_err(|err| { - log::error!("Unable to get references from TypeScript: {}", err); - LspError::internal_error() - }) - } } #[derive(Debug, Clone)] @@ -3161,9 +3539,16 @@ impl From<&CompletionItemData> for GetCompletionDetailsArgs { } } +#[derive(Debug)] +pub struct GetNavigateToItemsArgs { + pub search: String, + pub max_result_count: Option, + pub file: Option, +} + /// Methods that are supported by the Language Service in the compiler isolate. #[derive(Debug)] -pub enum RequestMethod { +enum RequestMethod { /// Configure the compilation settings for the server. Configure(TsConfig), /// Get rename locations at a given position. @@ -3198,11 +3583,7 @@ pub enum RequestMethod { /// Get implementation information for a specific position. 
GetImplementation((ModuleSpecifier, u32)), /// Get "navigate to" items, which are converted to workspace symbols - GetNavigateToItems { - search: String, - max_result_count: Option, - file: Option, - }, + GetNavigateToItems(GetNavigateToItemsArgs), /// Get a "navigation tree" for a specifier. GetNavigationTree(ModuleSpecifier), /// Get outlining spans for a specifier. @@ -3356,11 +3737,11 @@ impl RequestMethod { "specifier": state.denormalize_specifier(specifier), "position": position, }), - RequestMethod::GetNavigateToItems { + RequestMethod::GetNavigateToItems(GetNavigateToItemsArgs { search, max_result_count, file, - } => json!({ + }) => json!({ "id": id, "method": "getNavigateToItems", "search": search, @@ -3470,7 +3851,7 @@ impl RequestMethod { } /// Send a request into a runtime and return the JSON value of the response. -pub fn request( +fn request( runtime: &mut JsRuntime, state_snapshot: Arc, method: RequestMethod, diff --git a/cli/npm/cache.rs b/cli/npm/cache.rs index cda40fd172..aba6c0cca7 100644 --- a/cli/npm/cache.rs +++ b/cli/npm/cache.rs @@ -20,7 +20,6 @@ use deno_semver::Version; use once_cell::sync::Lazy; use crate::args::CacheSetting; -use crate::cache::DenoDir; use crate::http_util::HttpClient; use crate::util::fs::canonicalize_path; use crate::util::fs::hard_link_dir_recursive; @@ -120,20 +119,6 @@ pub struct ReadonlyNpmCache { root_dir_url: Url, } -// todo(dsherret): implementing Default for this is error prone because someone -// might accidentally use the default implementation instead of getting the -// correct location of the deno dir, which might be provided via a CLI argument. -// That said, the rest of the LSP code does this at the moment and so this code -// copies that. -impl Default for ReadonlyNpmCache { - fn default() -> Self { - // This only gets used when creating the tsc runtime and for testing, and so - // it shouldn't ever actually access the DenoDir, so it doesn't support a - // custom root. 
- Self::new(DenoDir::new(None).unwrap().npm_folder_path()) - } -} - impl ReadonlyNpmCache { pub fn new(root_dir: PathBuf) -> Self { fn try_get_canonicalized_root_dir( From 2a0c66484098ba35c1b31d4dc6380887b6eb9da4 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Sat, 13 May 2023 14:49:11 +0900 Subject: [PATCH 165/320] chore: fix & update node compat config (#19106) --- cli/tests/node_compat/config.jsonc | 3 --- cli/tests/node_compat/test/fixtures/run-main.js | 1 - .../test/parallel/test-module-run-main.js | 15 --------------- .../test-worker-threads-broadcast-channel.js | 9 --------- .../test-worker-threads-message-channel.js | 10 ---------- cli/tests/unit_node/module_test.ts | 17 +++++++++++++++-- .../testdata/add_global_property_run_main.js | 1 + cli/tests/unit_node/worker_threads_test.ts | 13 +++++++++++++ tools/node_compat/TODO.md | 3 ++- 9 files changed, 31 insertions(+), 41 deletions(-) delete mode 100644 cli/tests/node_compat/test/fixtures/run-main.js delete mode 100644 cli/tests/node_compat/test/parallel/test-module-run-main.js delete mode 100644 cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js delete mode 100644 cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js create mode 100644 cli/tests/unit_node/testdata/add_global_property_run_main.js create mode 100644 cli/tests/unit_node/worker_threads_test.ts diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 8fbc3e921d..81463bcaf5 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -366,7 +366,6 @@ "test-http-outgoing-message-inheritance.js", "test-http-outgoing-renderHeaders.js", "test-http-outgoing-settimeout.js", - "test-module-run-main.js", "test-net-access-byteswritten.js", "test-net-better-error-messages-listen-path.js", "test-net-better-error-messages-path.js", @@ -655,8 +654,6 @@ "test-whatwg-url-override-hostname.js", "test-whatwg-url-properties.js", 
"test-whatwg-url-toascii.js", - "test-worker-threads-broadcast-channel.js", - "test-worker-threads-message-channel.js", "test-zlib-close-after-error.js", "test-zlib-close-after-write.js", "test-zlib-convenience-methods.js", diff --git a/cli/tests/node_compat/test/fixtures/run-main.js b/cli/tests/node_compat/test/fixtures/run-main.js deleted file mode 100644 index 9a081cbbae..0000000000 --- a/cli/tests/node_compat/test/fixtures/run-main.js +++ /dev/null @@ -1 +0,0 @@ -globalThis.foo = 42; diff --git a/cli/tests/node_compat/test/parallel/test-module-run-main.js b/cli/tests/node_compat/test/parallel/test-module-run-main.js deleted file mode 100644 index 8e30de2671..0000000000 --- a/cli/tests/node_compat/test/parallel/test-module-run-main.js +++ /dev/null @@ -1,15 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -"use strict"; - -const Module = require("module"); -const assert = require("assert/strict"); -const path = require("path"); - -const file = path.join(__dirname, "..", "fixtures", "run-main.js"); -process.argv = [process.argv[0], file]; -Module.runMain(); - -// The required file via `Module.runMain()` sets this global -assert.equal(globalThis.foo, 42); diff --git a/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js b/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js deleted file mode 100644 index a8fd3ff0e8..0000000000 --- a/cli/tests/node_compat/test/parallel/test-worker-threads-broadcast-channel.js +++ /dev/null @@ -1,9 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -"use strict"; - -const assert = require("assert/strict"); -const worker_threads = require("worker_threads"); - -assert.equal(BroadcastChannel, worker_threads.BroadcastChannel); diff --git a/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js b/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js deleted file mode 100644 index b831ed3fee..0000000000 --- 
a/cli/tests/node_compat/test/parallel/test-worker-threads-message-channel.js +++ /dev/null @@ -1,10 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -"use strict"; - -const assert = require("assert/strict"); -const worker_threads = require("worker_threads"); - -assert.equal(MessageChannel, worker_threads.MessageChannel); -assert.equal(MessagePort, worker_threads.MessagePort); diff --git a/cli/tests/unit_node/module_test.ts b/cli/tests/unit_node/module_test.ts index d071ed2d18..a5c819d960 100644 --- a/cli/tests/unit_node/module_test.ts +++ b/cli/tests/unit_node/module_test.ts @@ -1,7 +1,8 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. import { Module } from "node:module"; -import { assertStrictEquals } from "../../../test_util/std/testing/asserts.ts"; +import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; +import process from "node:process"; Deno.test("[node/module _preloadModules] has internal require hook", () => { // Check if it's there @@ -10,5 +11,17 @@ Deno.test("[node/module _preloadModules] has internal require hook", () => { "./cli/tests/unit_node/testdata/add_global_property.js", ]); // deno-lint-ignore no-explicit-any - assertStrictEquals((globalThis as any).foo, "Hello"); + assertEquals((globalThis as any).foo, "Hello"); +}); + +Deno.test("[node/module runMain] loads module using the current process.argv", () => { + process.argv = [ + process.argv[0], + "./cli/tests/unit_node/testdata/add_global_property_run_main.js", + ]; + + // deno-lint-ignore no-explicit-any + (Module as any).runMain(); + // deno-lint-ignore no-explicit-any + assertEquals((globalThis as any).calledViaRunMain, true); }); diff --git a/cli/tests/unit_node/testdata/add_global_property_run_main.js b/cli/tests/unit_node/testdata/add_global_property_run_main.js new file mode 100644 index 0000000000..c9db1cea66 --- /dev/null +++ b/cli/tests/unit_node/testdata/add_global_property_run_main.js @@ -0,0 +1 @@ 
+globalThis.calledViaRunMain = true; diff --git a/cli/tests/unit_node/worker_threads_test.ts b/cli/tests/unit_node/worker_threads_test.ts new file mode 100644 index 0000000000..17de7cca1e --- /dev/null +++ b/cli/tests/unit_node/worker_threads_test.ts @@ -0,0 +1,13 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import { assertEquals } from "../../../test_util/std/testing/asserts.ts"; +import workerThreads from "node:worker_threads"; + +Deno.test("[node/worker_threads] BroadcastChannel is exported", () => { + assertEquals(workerThreads.BroadcastChannel, BroadcastChannel); +}); + +Deno.test("[node/worker_threads] MessageChannel are MessagePort are exported", () => { + assertEquals(workerThreads.MessageChannel, MessageChannel); + assertEquals(workerThreads.MessagePort, MessagePort); +}); diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index cca14b4997..0aae01edcd 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. 
-Total: 2923 +Total: 2924 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -281,6 +281,7 @@ Total: 2923 - [parallel/test-child-process-exec-encoding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-encoding.js) - [parallel/test-child-process-exec-std-encoding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-std-encoding.js) - [parallel/test-child-process-exec-timeout-expire.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-expire.js) +- [parallel/test-child-process-exec-timeout-kill.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-kill.js) - [parallel/test-child-process-exec-timeout-not-expired.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-exec-timeout-not-expired.js) - [parallel/test-child-process-execFile-promisified-abortController.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-execFile-promisified-abortController.js) - [parallel/test-child-process-execfile.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-execfile.js) From ab88dc2c688ba085de476ae66a67bc383a921819 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Sat, 13 May 2023 15:26:16 +0900 Subject: [PATCH 166/320] chore(ext/node): removed skipped compat test cases (#19109) --- cli/tests/node_compat/config.jsonc | 10 - cli/tests/node_compat/test/common/index.js | 1 + .../test/parallel/test-url-format-whatwg.js | 149 --- .../test/parallel/test-url-parse-format.js | 1053 ----------------- ...-whatwg-encoding-custom-fatal-streaming.js | 68 -- ...hatwg-encoding-custom-textdecoder-fatal.js | 91 -- ...ing-custom-textdecoder-utf16-surrogates.js | 63 - 
.../test-whatwg-url-custom-domainto.js | 64 - .../test-whatwg-url-custom-inspect.js | 75 -- .../test-whatwg-url-custom-parsing.js | 87 -- .../test-whatwg-url-custom-setters.js | 67 -- .../test/parallel/test-whatwg-url-toascii.js | 93 -- tools/node_compat/TODO.md | 12 +- 13 files changed, 12 insertions(+), 1821 deletions(-) delete mode 100644 cli/tests/node_compat/test/parallel/test-url-format-whatwg.js delete mode 100644 cli/tests/node_compat/test/parallel/test-url-parse-format.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-url-custom-domainto.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-url-custom-inspect.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-url-custom-parsing.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-url-custom-setters.js delete mode 100644 cli/tests/node_compat/test/parallel/test-whatwg-url-toascii.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 81463bcaf5..2146daf926 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -610,9 +610,7 @@ "test-url-domain-ascii-unicode.js", "test-url-fileurltopath.js", "test-url-format-invalid-input.js", - "test-url-format-whatwg.js", "test-url-format.js", - "test-url-parse-format.js", "test-url-parse-invalid-input.js", "test-url-parse-query.js", "test-url-pathtofileurl.js", @@ -635,25 +633,17 @@ "test-vm-static-this.js", "test-webcrypto-sign-verify.js", "test-whatwg-encoding-custom-api-basics.js", - "test-whatwg-encoding-custom-fatal-streaming.js", - "test-whatwg-encoding-custom-textdecoder-fatal.js", 
"test-whatwg-encoding-custom-textdecoder-ignorebom.js", "test-whatwg-encoding-custom-textdecoder-streaming.js", - "test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js", "test-whatwg-events-add-event-listener-options-passive.js", "test-whatwg-events-add-event-listener-options-signal.js", "test-whatwg-events-customevent.js", "test-whatwg-url-custom-deepequal.js", - "test-whatwg-url-custom-domainto.js", "test-whatwg-url-custom-global.js", "test-whatwg-url-custom-href-side-effect.js", - "test-whatwg-url-custom-inspect.js", - "test-whatwg-url-custom-parsing.js", - "test-whatwg-url-custom-setters.js", "test-whatwg-url-custom-tostringtag.js", "test-whatwg-url-override-hostname.js", "test-whatwg-url-properties.js", - "test-whatwg-url-toascii.js", "test-zlib-close-after-error.js", "test-zlib-close-after-write.js", "test-zlib-convenience-methods.js", diff --git a/cli/tests/node_compat/test/common/index.js b/cli/tests/node_compat/test/common/index.js index 491dabd2f3..0f6019746e 100644 --- a/cli/tests/node_compat/test/common/index.js +++ b/cli/tests/node_compat/test/common/index.js @@ -446,6 +446,7 @@ module.exports = { getArrayBufferViews, getBufferSources, hasCrypto: true, + hasIntl: true, hasMultiLocalhost() { return false; }, diff --git a/cli/tests/node_compat/test/parallel/test-url-format-whatwg.js b/cli/tests/node_compat/test/parallel/test-url-format-whatwg.js deleted file mode 100644 index ea099f494c..0000000000 --- a/cli/tests/node_compat/test/parallel/test-url-format-whatwg.js +++ /dev/null @@ -1,149 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". 
Do not modify this file manually - -'use strict'; - -const common = require('../common'); -if (!common.hasIntl) - common.skip('missing Intl'); - -const assert = require('assert'); -const url = require('url'); - -const myURL = new URL('http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c'); - -assert.strictEqual( - url.format(myURL), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, {}), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -{ - [true, 1, 'test', Infinity].forEach((value) => { - assert.throws( - () => url.format(myURL, value), - { - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError', - message: 'The "options" argument must be of type object.' + - common.invalidArgTypeHelper(value) - } - ); - }); -} - -// Any falsy value other than undefined will be treated as false. -// Any truthy value will be treated as true. - -assert.strictEqual( - url.format(myURL, { auth: false }), - 'http://xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { auth: '' }), - 'http://xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { auth: 0 }), - 'http://xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { auth: 1 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { auth: {} }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { fragment: false }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b' -); - -assert.strictEqual( - url.format(myURL, { fragment: '' }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b' -); - -assert.strictEqual( - url.format(myURL, { fragment: 0 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b' -); - -assert.strictEqual( - url.format(myURL, { fragment: 1 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { fragment: {} 
}), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { search: false }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a#c' -); - -assert.strictEqual( - url.format(myURL, { search: '' }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a#c' -); - -assert.strictEqual( - url.format(myURL, { search: 0 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a#c' -); - -assert.strictEqual( - url.format(myURL, { search: 1 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { search: {} }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { unicode: true }), - 'http://user:pass@理容ナカムラ.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { unicode: 1 }), - 'http://user:pass@理容ナカムラ.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { unicode: {} }), - 'http://user:pass@理容ナカムラ.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { unicode: false }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(myURL, { unicode: 0 }), - 'http://user:pass@xn--lck1c3crb1723bpq4a.com/a?a=b#c' -); - -assert.strictEqual( - url.format(new URL('http://user:pass@xn--0zwm56d.com:8080/path'), { unicode: true }), - 'http://user:pass@测试.com:8080/path' -); diff --git a/cli/tests/node_compat/test/parallel/test-url-parse-format.js b/cli/tests/node_compat/test/parallel/test-url-parse-format.js deleted file mode 100644 index 7079857bd5..0000000000 --- a/cli/tests/node_compat/test/parallel/test-url-parse-format.js +++ /dev/null @@ -1,1053 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". 
Do not modify this file manually - -'use strict'; -const common = require('../common'); - -if (!common.hasIntl) - common.skip('missing Intl'); - -const assert = require('assert'); -const inspect = require('util').inspect; - -const url = require('url'); - -// URLs to parse, and expected data -// { url : parsed } -const parseTests = { - '//some_path': { - href: '//some_path', - pathname: '//some_path', - path: '//some_path' - }, - - 'http:\\\\evil-phisher\\foo.html#h\\a\\s\\h': { - protocol: 'http:', - slashes: true, - host: 'evil-phisher', - hostname: 'evil-phisher', - pathname: '/foo.html', - path: '/foo.html', - hash: '#h%5Ca%5Cs%5Ch', - href: 'http://evil-phisher/foo.html#h%5Ca%5Cs%5Ch' - }, - - 'http:\\\\evil-phisher\\foo.html?json="\\"foo\\""#h\\a\\s\\h': { - protocol: 'http:', - slashes: true, - host: 'evil-phisher', - hostname: 'evil-phisher', - pathname: '/foo.html', - search: '?json=%22%5C%22foo%5C%22%22', - query: 'json=%22%5C%22foo%5C%22%22', - path: '/foo.html?json=%22%5C%22foo%5C%22%22', - hash: '#h%5Ca%5Cs%5Ch', - href: 'http://evil-phisher/foo.html?json=%22%5C%22foo%5C%22%22#h%5Ca%5Cs%5Ch' - }, - - 'http:\\\\evil-phisher\\foo.html#h\\a\\s\\h?blarg': { - protocol: 'http:', - slashes: true, - host: 'evil-phisher', - hostname: 'evil-phisher', - pathname: '/foo.html', - path: '/foo.html', - hash: '#h%5Ca%5Cs%5Ch?blarg', - href: 'http://evil-phisher/foo.html#h%5Ca%5Cs%5Ch?blarg' - }, - - - 'http:\\\\evil-phisher\\foo.html': { - protocol: 'http:', - slashes: true, - host: 'evil-phisher', - hostname: 'evil-phisher', - pathname: '/foo.html', - path: '/foo.html', - href: 'http://evil-phisher/foo.html' - }, - - 'HTTP://www.example.com/': { - href: 'http://www.example.com/', - protocol: 'http:', - slashes: true, - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'HTTP://www.example.com': { - href: 'http://www.example.com/', - protocol: 'http:', - slashes: true, - host: 'www.example.com', - hostname: 'www.example.com', 
- pathname: '/', - path: '/' - }, - - 'http://www.ExAmPlE.com/': { - href: 'http://www.example.com/', - protocol: 'http:', - slashes: true, - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'http://user:pw@www.ExAmPlE.com/': { - href: 'http://user:pw@www.example.com/', - protocol: 'http:', - slashes: true, - auth: 'user:pw', - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'http://USER:PW@www.ExAmPlE.com/': { - href: 'http://USER:PW@www.example.com/', - protocol: 'http:', - slashes: true, - auth: 'USER:PW', - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'http://user@www.example.com/': { - href: 'http://user@www.example.com/', - protocol: 'http:', - slashes: true, - auth: 'user', - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'http://user%3Apw@www.example.com/': { - href: 'http://user:pw@www.example.com/', - protocol: 'http:', - slashes: true, - auth: 'user:pw', - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - 'http://x.com/path?that\'s#all, folks': { - href: 'http://x.com/path?that%27s#all,%20folks', - protocol: 'http:', - slashes: true, - host: 'x.com', - hostname: 'x.com', - search: '?that%27s', - query: 'that%27s', - pathname: '/path', - hash: '#all,%20folks', - path: '/path?that%27s' - }, - - 'HTTP://X.COM/Y': { - href: 'http://x.com/Y', - protocol: 'http:', - slashes: true, - host: 'x.com', - hostname: 'x.com', - pathname: '/Y', - path: '/Y' - }, - - // Whitespace in the front - ' http://www.example.com/': { - href: 'http://www.example.com/', - protocol: 'http:', - slashes: true, - host: 'www.example.com', - hostname: 'www.example.com', - pathname: '/', - path: '/' - }, - - // + not an invalid host character - // per https://url.spec.whatwg.org/#host-parsing - 'http://x.y.com+a/b/c': { - href: 'http://x.y.com+a/b/c', - 
protocol: 'http:', - slashes: true, - host: 'x.y.com+a', - hostname: 'x.y.com+a', - pathname: '/b/c', - path: '/b/c' - }, - - // An unexpected invalid char in the hostname. - 'HtTp://x.y.cOm;a/b/c?d=e#f gi': { - href: 'http://x.y.com/;a/b/c?d=e#f%20g%3Ch%3Ei', - protocol: 'http:', - slashes: true, - host: 'x.y.com', - hostname: 'x.y.com', - pathname: ';a/b/c', - search: '?d=e', - query: 'd=e', - hash: '#f%20g%3Ch%3Ei', - path: ';a/b/c?d=e' - }, - - // Make sure that we don't accidentally lcast the path parts. - 'HtTp://x.y.cOm;A/b/c?d=e#f gi': { - href: 'http://x.y.com/;A/b/c?d=e#f%20g%3Ch%3Ei', - protocol: 'http:', - slashes: true, - host: 'x.y.com', - hostname: 'x.y.com', - pathname: ';A/b/c', - search: '?d=e', - query: 'd=e', - hash: '#f%20g%3Ch%3Ei', - path: ';A/b/c?d=e' - }, - - 'http://x...y...#p': { - href: 'http://x...y.../#p', - protocol: 'http:', - slashes: true, - host: 'x...y...', - hostname: 'x...y...', - hash: '#p', - pathname: '/', - path: '/' - }, - - 'http://x/p/"quoted"': { - href: 'http://x/p/%22quoted%22', - protocol: 'http:', - slashes: true, - host: 'x', - hostname: 'x', - pathname: '/p/%22quoted%22', - path: '/p/%22quoted%22' - }, - - ' Is a URL!': { - href: '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!', - pathname: '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!', - path: '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!' 
- }, - - 'http://www.narwhaljs.org/blog/categories?id=news': { - href: 'http://www.narwhaljs.org/blog/categories?id=news', - protocol: 'http:', - slashes: true, - host: 'www.narwhaljs.org', - hostname: 'www.narwhaljs.org', - search: '?id=news', - query: 'id=news', - pathname: '/blog/categories', - path: '/blog/categories?id=news' - }, - - 'http://mt0.google.com/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=': { - href: 'http://mt0.google.com/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=', - protocol: 'http:', - slashes: true, - host: 'mt0.google.com', - hostname: 'mt0.google.com', - pathname: '/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=', - path: '/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=' - }, - - 'http://mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=': { - href: 'http://mt0.google.com/vt/lyrs=m@114???&hl=en&src=api' + - '&x=2&y=2&z=3&s=', - protocol: 'http:', - slashes: true, - host: 'mt0.google.com', - hostname: 'mt0.google.com', - search: '???&hl=en&src=api&x=2&y=2&z=3&s=', - query: '??&hl=en&src=api&x=2&y=2&z=3&s=', - pathname: '/vt/lyrs=m@114', - path: '/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=' - }, - - 'http://user:pass@mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=': { - href: 'http://user:pass@mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=', - protocol: 'http:', - slashes: true, - host: 'mt0.google.com', - auth: 'user:pass', - hostname: 'mt0.google.com', - search: '???&hl=en&src=api&x=2&y=2&z=3&s=', - query: '??&hl=en&src=api&x=2&y=2&z=3&s=', - pathname: '/vt/lyrs=m@114', - path: '/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=' - }, - - 'file:///etc/passwd': { - href: 'file:///etc/passwd', - slashes: true, - protocol: 'file:', - pathname: '/etc/passwd', - hostname: '', - host: '', - path: '/etc/passwd' - }, - - 'file://localhost/etc/passwd': { - href: 'file://localhost/etc/passwd', - protocol: 'file:', - slashes: true, - pathname: '/etc/passwd', - hostname: 'localhost', - host: 'localhost', - path: '/etc/passwd' - 
}, - - 'file://foo/etc/passwd': { - href: 'file://foo/etc/passwd', - protocol: 'file:', - slashes: true, - pathname: '/etc/passwd', - hostname: 'foo', - host: 'foo', - path: '/etc/passwd' - }, - - 'file:///etc/node/': { - href: 'file:///etc/node/', - slashes: true, - protocol: 'file:', - pathname: '/etc/node/', - hostname: '', - host: '', - path: '/etc/node/' - }, - - 'file://localhost/etc/node/': { - href: 'file://localhost/etc/node/', - protocol: 'file:', - slashes: true, - pathname: '/etc/node/', - hostname: 'localhost', - host: 'localhost', - path: '/etc/node/' - }, - - 'file://foo/etc/node/': { - href: 'file://foo/etc/node/', - protocol: 'file:', - slashes: true, - pathname: '/etc/node/', - hostname: 'foo', - host: 'foo', - path: '/etc/node/' - }, - - 'http:/baz/../foo/bar': { - href: 'http:/baz/../foo/bar', - protocol: 'http:', - pathname: '/baz/../foo/bar', - path: '/baz/../foo/bar' - }, - - 'http://user:pass@example.com:8000/foo/bar?baz=quux#frag': { - href: 'http://user:pass@example.com:8000/foo/bar?baz=quux#frag', - protocol: 'http:', - slashes: true, - host: 'example.com:8000', - auth: 'user:pass', - port: '8000', - hostname: 'example.com', - hash: '#frag', - search: '?baz=quux', - query: 'baz=quux', - pathname: '/foo/bar', - path: '/foo/bar?baz=quux' - }, - - '//user:pass@example.com:8000/foo/bar?baz=quux#frag': { - href: '//user:pass@example.com:8000/foo/bar?baz=quux#frag', - slashes: true, - host: 'example.com:8000', - auth: 'user:pass', - port: '8000', - hostname: 'example.com', - hash: '#frag', - search: '?baz=quux', - query: 'baz=quux', - pathname: '/foo/bar', - path: '/foo/bar?baz=quux' - }, - - '/foo/bar?baz=quux#frag': { - href: '/foo/bar?baz=quux#frag', - hash: '#frag', - search: '?baz=quux', - query: 'baz=quux', - pathname: '/foo/bar', - path: '/foo/bar?baz=quux' - }, - - 'http:/foo/bar?baz=quux#frag': { - href: 'http:/foo/bar?baz=quux#frag', - protocol: 'http:', - hash: '#frag', - search: '?baz=quux', - query: 'baz=quux', - pathname: 
'/foo/bar', - path: '/foo/bar?baz=quux' - }, - - 'mailto:foo@bar.com?subject=hello': { - href: 'mailto:foo@bar.com?subject=hello', - protocol: 'mailto:', - host: 'bar.com', - auth: 'foo', - hostname: 'bar.com', - search: '?subject=hello', - query: 'subject=hello', - path: '?subject=hello' - }, - - 'javascript:alert(\'hello\');': { - href: 'javascript:alert(\'hello\');', - protocol: 'javascript:', - pathname: 'alert(\'hello\');', - path: 'alert(\'hello\');' - }, - - 'xmpp:isaacschlueter@jabber.org': { - href: 'xmpp:isaacschlueter@jabber.org', - protocol: 'xmpp:', - host: 'jabber.org', - auth: 'isaacschlueter', - hostname: 'jabber.org' - }, - - 'http://atpass:foo%40bar@127.0.0.1:8080/path?search=foo#bar': { - href: 'http://atpass:foo%40bar@127.0.0.1:8080/path?search=foo#bar', - protocol: 'http:', - slashes: true, - host: '127.0.0.1:8080', - auth: 'atpass:foo@bar', - hostname: '127.0.0.1', - port: '8080', - pathname: '/path', - search: '?search=foo', - query: 'search=foo', - hash: '#bar', - path: '/path?search=foo' - }, - - 'svn+ssh://foo/bar': { - href: 'svn+ssh://foo/bar', - host: 'foo', - hostname: 'foo', - protocol: 'svn+ssh:', - pathname: '/bar', - path: '/bar', - slashes: true - }, - - 'dash-test://foo/bar': { - href: 'dash-test://foo/bar', - host: 'foo', - hostname: 'foo', - protocol: 'dash-test:', - pathname: '/bar', - path: '/bar', - slashes: true - }, - - 'dash-test:foo/bar': { - href: 'dash-test:foo/bar', - host: 'foo', - hostname: 'foo', - protocol: 'dash-test:', - pathname: '/bar', - path: '/bar' - }, - - 'dot.test://foo/bar': { - href: 'dot.test://foo/bar', - host: 'foo', - hostname: 'foo', - protocol: 'dot.test:', - pathname: '/bar', - path: '/bar', - slashes: true - }, - - 'dot.test:foo/bar': { - href: 'dot.test:foo/bar', - host: 'foo', - hostname: 'foo', - protocol: 'dot.test:', - pathname: '/bar', - path: '/bar' - }, - - // IDNA tests - 'http://www.日本語.com/': { - href: 'http://www.xn--wgv71a119e.com/', - protocol: 'http:', - slashes: true, - host: 
'www.xn--wgv71a119e.com', - hostname: 'www.xn--wgv71a119e.com', - pathname: '/', - path: '/' - }, - - 'http://example.Bücher.com/': { - href: 'http://example.xn--bcher-kva.com/', - protocol: 'http:', - slashes: true, - host: 'example.xn--bcher-kva.com', - hostname: 'example.xn--bcher-kva.com', - pathname: '/', - path: '/' - }, - - 'http://www.Äffchen.com/': { - href: 'http://www.xn--ffchen-9ta.com/', - protocol: 'http:', - slashes: true, - host: 'www.xn--ffchen-9ta.com', - hostname: 'www.xn--ffchen-9ta.com', - pathname: '/', - path: '/' - }, - - 'http://www.Äffchen.cOm;A/b/c?d=e#f gi': { - href: 'http://www.xn--ffchen-9ta.com/;A/b/c?d=e#f%20g%3Ch%3Ei', - protocol: 'http:', - slashes: true, - host: 'www.xn--ffchen-9ta.com', - hostname: 'www.xn--ffchen-9ta.com', - pathname: ';A/b/c', - search: '?d=e', - query: 'd=e', - hash: '#f%20g%3Ch%3Ei', - path: ';A/b/c?d=e' - }, - - 'http://SÉLIER.COM/': { - href: 'http://xn--slier-bsa.com/', - protocol: 'http:', - slashes: true, - host: 'xn--slier-bsa.com', - hostname: 'xn--slier-bsa.com', - pathname: '/', - path: '/' - }, - - 'http://ليهمابتكلموشعربي؟.ي؟/': { - href: 'http://xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f/', - protocol: 'http:', - slashes: true, - host: 'xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f', - hostname: 'xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f', - pathname: '/', - path: '/' - }, - - 'http://➡.ws/➡': { - href: 'http://xn--hgi.ws/➡', - protocol: 'http:', - slashes: true, - host: 'xn--hgi.ws', - hostname: 'xn--hgi.ws', - pathname: '/➡', - path: '/➡' - }, - - 'http://bucket_name.s3.amazonaws.com/image.jpg': { - protocol: 'http:', - slashes: true, - host: 'bucket_name.s3.amazonaws.com', - hostname: 'bucket_name.s3.amazonaws.com', - pathname: '/image.jpg', - href: 'http://bucket_name.s3.amazonaws.com/image.jpg', - path: '/image.jpg' - }, - - 'git+http://github.com/joyent/node.git': { - protocol: 'git+http:', - slashes: true, - host: 'github.com', - hostname: 'github.com', - pathname: '/joyent/node.git', - path: 
'/joyent/node.git', - href: 'git+http://github.com/joyent/node.git' - }, - - // If local1@domain1 is uses as a relative URL it may - // be parse into auth@hostname, but here there is no - // way to make it work in url.parse, I add the test to be explicit - 'local1@domain1': { - pathname: 'local1@domain1', - path: 'local1@domain1', - href: 'local1@domain1' - }, - - // While this may seem counter-intuitive, a browser will parse - // as a path. - 'www.example.com': { - href: 'www.example.com', - pathname: 'www.example.com', - path: 'www.example.com' - }, - - // ipv6 support - '[fe80::1]': { - href: '[fe80::1]', - pathname: '[fe80::1]', - path: '[fe80::1]' - }, - - 'coap://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]': { - protocol: 'coap:', - slashes: true, - host: '[fedc:ba98:7654:3210:fedc:ba98:7654:3210]', - hostname: 'fedc:ba98:7654:3210:fedc:ba98:7654:3210', - href: 'coap://[fedc:ba98:7654:3210:fedc:ba98:7654:3210]/', - pathname: '/', - path: '/' - }, - - 'coap://[1080:0:0:0:8:800:200C:417A]:61616/': { - protocol: 'coap:', - slashes: true, - host: '[1080:0:0:0:8:800:200c:417a]:61616', - port: '61616', - hostname: '1080:0:0:0:8:800:200c:417a', - href: 'coap://[1080:0:0:0:8:800:200c:417a]:61616/', - pathname: '/', - path: '/' - }, - - 'http://user:password@[3ffe:2a00:100:7031::1]:8080': { - protocol: 'http:', - slashes: true, - auth: 'user:password', - host: '[3ffe:2a00:100:7031::1]:8080', - port: '8080', - hostname: '3ffe:2a00:100:7031::1', - href: 'http://user:password@[3ffe:2a00:100:7031::1]:8080/', - pathname: '/', - path: '/' - }, - - 'coap://u:p@[::192.9.5.5]:61616/.well-known/r?n=Temperature': { - protocol: 'coap:', - slashes: true, - auth: 'u:p', - host: '[::192.9.5.5]:61616', - port: '61616', - hostname: '::192.9.5.5', - href: 'coap://u:p@[::192.9.5.5]:61616/.well-known/r?n=Temperature', - search: '?n=Temperature', - query: 'n=Temperature', - pathname: '/.well-known/r', - path: '/.well-known/r?n=Temperature' - }, - - // empty port - 'http://example.com:': { - 
protocol: 'http:', - slashes: true, - host: 'example.com', - hostname: 'example.com', - href: 'http://example.com/', - pathname: '/', - path: '/' - }, - - 'http://example.com:/a/b.html': { - protocol: 'http:', - slashes: true, - host: 'example.com', - hostname: 'example.com', - href: 'http://example.com/a/b.html', - pathname: '/a/b.html', - path: '/a/b.html' - }, - - 'http://example.com:?a=b': { - protocol: 'http:', - slashes: true, - host: 'example.com', - hostname: 'example.com', - href: 'http://example.com/?a=b', - search: '?a=b', - query: 'a=b', - pathname: '/', - path: '/?a=b' - }, - - 'http://example.com:#abc': { - protocol: 'http:', - slashes: true, - host: 'example.com', - hostname: 'example.com', - href: 'http://example.com/#abc', - hash: '#abc', - pathname: '/', - path: '/' - }, - - 'http://[fe80::1]:/a/b?a=b#abc': { - protocol: 'http:', - slashes: true, - host: '[fe80::1]', - hostname: 'fe80::1', - href: 'http://[fe80::1]/a/b?a=b#abc', - search: '?a=b', - query: 'a=b', - hash: '#abc', - pathname: '/a/b', - path: '/a/b?a=b' - }, - - 'http://-lovemonsterz.tumblr.com/rss': { - protocol: 'http:', - slashes: true, - host: '-lovemonsterz.tumblr.com', - hostname: '-lovemonsterz.tumblr.com', - href: 'http://-lovemonsterz.tumblr.com/rss', - pathname: '/rss', - path: '/rss', - }, - - 'http://-lovemonsterz.tumblr.com:80/rss': { - protocol: 'http:', - slashes: true, - port: '80', - host: '-lovemonsterz.tumblr.com:80', - hostname: '-lovemonsterz.tumblr.com', - href: 'http://-lovemonsterz.tumblr.com:80/rss', - pathname: '/rss', - path: '/rss', - }, - - 'http://user:pass@-lovemonsterz.tumblr.com/rss': { - protocol: 'http:', - slashes: true, - auth: 'user:pass', - host: '-lovemonsterz.tumblr.com', - hostname: '-lovemonsterz.tumblr.com', - href: 'http://user:pass@-lovemonsterz.tumblr.com/rss', - pathname: '/rss', - path: '/rss', - }, - - 'http://user:pass@-lovemonsterz.tumblr.com:80/rss': { - protocol: 'http:', - slashes: true, - auth: 'user:pass', - port: '80', - host: 
'-lovemonsterz.tumblr.com:80', - hostname: '-lovemonsterz.tumblr.com', - href: 'http://user:pass@-lovemonsterz.tumblr.com:80/rss', - pathname: '/rss', - path: '/rss', - }, - - 'http://_jabber._tcp.google.com/test': { - protocol: 'http:', - slashes: true, - host: '_jabber._tcp.google.com', - hostname: '_jabber._tcp.google.com', - href: 'http://_jabber._tcp.google.com/test', - pathname: '/test', - path: '/test', - }, - - 'http://user:pass@_jabber._tcp.google.com/test': { - protocol: 'http:', - slashes: true, - auth: 'user:pass', - host: '_jabber._tcp.google.com', - hostname: '_jabber._tcp.google.com', - href: 'http://user:pass@_jabber._tcp.google.com/test', - pathname: '/test', - path: '/test', - }, - - 'http://_jabber._tcp.google.com:80/test': { - protocol: 'http:', - slashes: true, - port: '80', - host: '_jabber._tcp.google.com:80', - hostname: '_jabber._tcp.google.com', - href: 'http://_jabber._tcp.google.com:80/test', - pathname: '/test', - path: '/test', - }, - - 'http://user:pass@_jabber._tcp.google.com:80/test': { - protocol: 'http:', - slashes: true, - auth: 'user:pass', - port: '80', - host: '_jabber._tcp.google.com:80', - hostname: '_jabber._tcp.google.com', - href: 'http://user:pass@_jabber._tcp.google.com:80/test', - pathname: '/test', - path: '/test', - }, - - 'http://x:1/\' <>"`/{}|\\^~`/': { - protocol: 'http:', - slashes: true, - host: 'x:1', - port: '1', - hostname: 'x', - pathname: '/%27%20%3C%3E%22%60/%7B%7D%7C/%5E~%60/', - path: '/%27%20%3C%3E%22%60/%7B%7D%7C/%5E~%60/', - href: 'http://x:1/%27%20%3C%3E%22%60/%7B%7D%7C/%5E~%60/' - }, - - 'http://a@b@c/': { - protocol: 'http:', - slashes: true, - auth: 'a@b', - host: 'c', - hostname: 'c', - href: 'http://a%40b@c/', - path: '/', - pathname: '/' - }, - - 'http://a@b?@c': { - protocol: 'http:', - slashes: true, - auth: 'a', - host: 'b', - hostname: 'b', - href: 'http://a@b/?@c', - path: '/?@c', - pathname: '/', - search: '?@c', - query: '@c' - }, - - 'http://a.b/\tbc\ndr\ref 
g"hq\'j?mn\\op^q=r`99{st|uv}wz': { - protocol: 'http:', - slashes: true, - host: 'a.b', - port: null, - hostname: 'a.b', - hash: null, - pathname: '/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E', - path: '/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', - search: '?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', - query: 'mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', - href: 'http://a.b/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz' - }, - - 'http://a\r" \t\n<\'b:b@c\r\nd/e?f': { - protocol: 'http:', - slashes: true, - auth: 'a\r" \t\n<\'b:b', - host: 'c', - port: null, - hostname: 'c', - hash: null, - search: '?f', - query: 'f', - pathname: '%0D%0Ad/e', - path: '%0D%0Ad/e?f', - href: 'http://a%0D%22%20%09%0A%3C\'b:b@c/%0D%0Ad/e?f' - }, - - // Git urls used by npm - 'git+ssh://git@github.com:npm/npm': { - protocol: 'git+ssh:', - slashes: true, - auth: 'git', - host: 'github.com', - port: null, - hostname: 'github.com', - hash: null, - search: null, - query: null, - pathname: '/:npm/npm', - path: '/:npm/npm', - href: 'git+ssh://git@github.com/:npm/npm' - }, - - 'https://*': { - protocol: 'https:', - slashes: true, - auth: null, - host: '', - port: null, - hostname: '', - hash: null, - search: null, - query: null, - pathname: '/*', - path: '/*', - href: 'https:///*' - }, - - // The following two URLs are the same, but they differ for a capital A. - // Verify that the protocol is checked in a case-insensitive manner. 
- 'javascript:alert(1);a=\x27@white-listed.com\x27': { - protocol: 'javascript:', - slashes: null, - auth: null, - host: null, - port: null, - hostname: null, - hash: null, - search: null, - query: null, - pathname: "alert(1);a='@white-listed.com'", - path: "alert(1);a='@white-listed.com'", - href: "javascript:alert(1);a='@white-listed.com'" - }, - - 'javAscript:alert(1);a=\x27@white-listed.com\x27': { - protocol: 'javascript:', - slashes: null, - auth: null, - host: null, - port: null, - hostname: null, - hash: null, - search: null, - query: null, - pathname: "alert(1);a='@white-listed.com'", - path: "alert(1);a='@white-listed.com'", - href: "javascript:alert(1);a='@white-listed.com'" - }, - - 'ws://www.example.com': { - protocol: 'ws:', - slashes: true, - hostname: 'www.example.com', - host: 'www.example.com', - pathname: '/', - path: '/', - href: 'ws://www.example.com/' - }, - - 'wss://www.example.com': { - protocol: 'wss:', - slashes: true, - hostname: 'www.example.com', - host: 'www.example.com', - pathname: '/', - path: '/', - href: 'wss://www.example.com/' - }, - - '//fhqwhgads@example.com/everybody-to-the-limit': { - protocol: null, - slashes: true, - auth: 'fhqwhgads', - host: 'example.com', - port: null, - hostname: 'example.com', - hash: null, - search: null, - query: null, - pathname: '/everybody-to-the-limit', - path: '/everybody-to-the-limit', - href: '//fhqwhgads@example.com/everybody-to-the-limit' - }, - - '//fhqwhgads@example.com/everybody#to-the-limit': { - protocol: null, - slashes: true, - auth: 'fhqwhgads', - host: 'example.com', - port: null, - hostname: 'example.com', - hash: '#to-the-limit', - search: null, - query: null, - pathname: '/everybody', - path: '/everybody', - href: '//fhqwhgads@example.com/everybody#to-the-limit' - }, - - '\bhttp://example.com/\b': { - protocol: 'http:', - slashes: true, - auth: null, - host: 'example.com', - port: null, - hostname: 'example.com', - hash: null, - search: null, - query: null, - pathname: '/', - 
path: '/', - href: 'http://example.com/' - } -}; - -for (const u in parseTests) { - let actual = url.parse(u); - const spaced = url.parse(` \t ${u}\n\t`); - let expected = Object.assign(new url.Url(), parseTests[u]); - - Object.keys(actual).forEach(function(i) { - if (expected[i] === undefined && actual[i] === null) { - expected[i] = null; - } - }); - - assert.deepStrictEqual( - actual, - expected, - `expected ${inspect(expected)}, got ${inspect(actual)}` - ); - assert.deepStrictEqual( - spaced, - expected, - `expected ${inspect(expected)}, got ${inspect(spaced)}` - ); - - expected = parseTests[u].href; - actual = url.format(parseTests[u]); - - assert.strictEqual(actual, expected, - `format(${u}) == ${u}\nactual:${actual}`); -} - -{ - const parsed = url.parse('http://nodejs.org/') - .resolveObject('jAvascript:alert(1);a=\x27@white-listed.com\x27'); - - const expected = Object.assign(new url.Url(), { - protocol: 'javascript:', - slashes: null, - auth: null, - host: null, - port: null, - hostname: null, - hash: null, - search: null, - query: null, - pathname: "alert(1);a='@white-listed.com'", - path: "alert(1);a='@white-listed.com'", - href: "javascript:alert(1);a='@white-listed.com'" - }); - - assert.deepStrictEqual(parsed, expected); -} diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js b/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js deleted file mode 100644 index b3ad5f0585..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js +++ /dev/null @@ -1,68 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". 
Do not modify this file manually - -'use strict'; - -// From: https://github.com/w3c/web-platform-tests/blob/d74324b53c/encoding/textdecoder-fatal-streaming.html -// With the twist that we specifically test for Node.js error codes - -const common = require('../common'); -const assert = require('assert'); - -if (!common.hasIntl) - common.skip('missing Intl'); - -{ - [ - { encoding: 'utf-8', sequence: [0xC0] }, - { encoding: 'utf-16le', sequence: [0x00] }, - { encoding: 'utf-16be', sequence: [0x00] }, - ].forEach((testCase) => { - const data = new Uint8Array([testCase.sequence]); - assert.throws( - () => { - const decoder = new TextDecoder(testCase.encoding, { fatal: true }); - decoder.decode(data); - }, { - code: 'ERR_ENCODING_INVALID_ENCODED_DATA', - name: 'TypeError', - message: - `The encoded data was not valid for encoding ${testCase.encoding}` - } - ); - }); -} - -{ - const decoder = new TextDecoder('utf-16le', { fatal: true }); - const odd = new Uint8Array([0x00]); - const even = new Uint8Array([0x00, 0x00]); - - assert.throws( - () => { - decoder.decode(even, { stream: true }); - decoder.decode(odd); - }, { - code: 'ERR_ENCODING_INVALID_ENCODED_DATA', - name: 'TypeError', - message: - 'The encoded data was not valid for encoding utf-16le' - } - ); - - assert.throws( - () => { - decoder.decode(odd, { stream: true }); - decoder.decode(even); - }, { - code: 'ERR_ENCODING_INVALID_ENCODED_DATA', - name: 'TypeError', - message: - 'The encoded data was not valid for encoding utf-16le' - } - ); -} diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js b/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js deleted file mode 100644 index 3a8aac4003..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js +++ /dev/null @@ -1,91 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. 
MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// From: https://github.com/w3c/web-platform-tests/blob/39a67e2fff/encoding/textdecoder-fatal.html -// With the twist that we specifically test for Node.js error codes - -const common = require('../common'); - -if (!common.hasIntl) - common.skip('missing Intl'); - -const assert = require('assert'); - -const bad = [ - { encoding: 'utf-8', input: [0xFF], name: 'invalid code' }, - { encoding: 'utf-8', input: [0xC0], name: 'ends early' }, - { encoding: 'utf-8', input: [0xE0], name: 'ends early 2' }, - { encoding: 'utf-8', input: [0xC0, 0x00], name: 'invalid trail' }, - { encoding: 'utf-8', input: [0xC0, 0xC0], name: 'invalid trail 2' }, - { encoding: 'utf-8', input: [0xE0, 0x00], name: 'invalid trail 3' }, - { encoding: 'utf-8', input: [0xE0, 0xC0], name: 'invalid trail 4' }, - { encoding: 'utf-8', input: [0xE0, 0x80, 0x00], name: 'invalid trail 5' }, - { encoding: 'utf-8', input: [0xE0, 0x80, 0xC0], name: 'invalid trail 6' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x80, 0x80, 0x80, 0x80], - name: '> 0x10FFFF' }, - { encoding: 'utf-8', input: [0xFE, 0x80, 0x80, 0x80, 0x80, 0x80], - name: 'obsolete lead byte' }, - // Overlong encodings - { encoding: 'utf-8', input: [0xC0, 0x80], name: 'overlong U+0000 - 2 bytes' }, - { encoding: 'utf-8', input: [0xE0, 0x80, 0x80], - name: 'overlong U+0000 - 3 bytes' }, - { encoding: 'utf-8', input: [0xF0, 0x80, 0x80, 0x80], - name: 'overlong U+0000 - 4 bytes' }, - { encoding: 'utf-8', input: [0xF8, 0x80, 0x80, 0x80, 0x80], - name: 'overlong U+0000 - 5 bytes' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x80, 0x80, 0x80, 0x80], - name: 'overlong U+0000 - 6 bytes' }, - { encoding: 'utf-8', input: [0xC1, 0xBF], name: 'overlong U+007F - 2 bytes' }, - { encoding: 'utf-8', input: [0xE0, 0x81, 0xBF], - name: 'overlong U+007F - 3 bytes' }, - { encoding: 'utf-8', input: [0xF0, 
0x80, 0x81, 0xBF], - name: 'overlong U+007F - 4 bytes' }, - { encoding: 'utf-8', input: [0xF8, 0x80, 0x80, 0x81, 0xBF], - name: 'overlong U+007F - 5 bytes' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x80, 0x80, 0x81, 0xBF], - name: 'overlong U+007F - 6 bytes' }, - { encoding: 'utf-8', input: [0xE0, 0x9F, 0xBF], - name: 'overlong U+07FF - 3 bytes' }, - { encoding: 'utf-8', input: [0xF0, 0x80, 0x9F, 0xBF], - name: 'overlong U+07FF - 4 bytes' }, - { encoding: 'utf-8', input: [0xF8, 0x80, 0x80, 0x9F, 0xBF], - name: 'overlong U+07FF - 5 bytes' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x80, 0x80, 0x9F, 0xBF], - name: 'overlong U+07FF - 6 bytes' }, - { encoding: 'utf-8', input: [0xF0, 0x8F, 0xBF, 0xBF], - name: 'overlong U+FFFF - 4 bytes' }, - { encoding: 'utf-8', input: [0xF8, 0x80, 0x8F, 0xBF, 0xBF], - name: 'overlong U+FFFF - 5 bytes' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x80, 0x8F, 0xBF, 0xBF], - name: 'overlong U+FFFF - 6 bytes' }, - { encoding: 'utf-8', input: [0xF8, 0x84, 0x8F, 0xBF, 0xBF], - name: 'overlong U+10FFFF - 5 bytes' }, - { encoding: 'utf-8', input: [0xFC, 0x80, 0x84, 0x8F, 0xBF, 0xBF], - name: 'overlong U+10FFFF - 6 bytes' }, - // UTF-16 surrogates encoded as code points in UTF-8 - { encoding: 'utf-8', input: [0xED, 0xA0, 0x80], name: 'lead surrogate' }, - { encoding: 'utf-8', input: [0xED, 0xB0, 0x80], name: 'trail surrogate' }, - { encoding: 'utf-8', input: [0xED, 0xA0, 0x80, 0xED, 0xB0, 0x80], - name: 'surrogate pair' }, - { encoding: 'utf-16le', input: [0x00], name: 'truncated code unit' }, - // Mismatched UTF-16 surrogates are exercised in utf16-surrogates.html - // FIXME: Add legacy encoding cases -]; - -bad.forEach((t) => { - assert.throws( - () => { - new TextDecoder(t.encoding, { fatal: true }) - .decode(new Uint8Array(t.input)); - }, { - code: 'ERR_ENCODING_INVALID_ENCODED_DATA', - name: 'TypeError' - } - ); -}); diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js 
b/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js deleted file mode 100644 index afe542dfd9..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js +++ /dev/null @@ -1,63 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// From: https://github.com/w3c/web-platform-tests/blob/39a67e2fff/encoding/textdecoder-utf16-surrogates.html -// With the twist that we specifically test for Node.js error codes - -const common = require('../common'); - -if (!common.hasIntl) - common.skip('missing Intl'); - -const assert = require('assert'); - -const bad = [ - { - encoding: 'utf-16le', - input: [0x00, 0xd8], - expected: '\uFFFD', - name: 'lone surrogate lead' - }, - { - encoding: 'utf-16le', - input: [0x00, 0xdc], - expected: '\uFFFD', - name: 'lone surrogate trail' - }, - { - encoding: 'utf-16le', - input: [0x00, 0xd8, 0x00, 0x00], - expected: '\uFFFD\u0000', - name: 'unmatched surrogate lead' - }, - { - encoding: 'utf-16le', - input: [0x00, 0xdc, 0x00, 0x00], - expected: '\uFFFD\u0000', - name: 'unmatched surrogate trail' - }, - { - encoding: 'utf-16le', - input: [0x00, 0xdc, 0x00, 0xd8], - expected: '\uFFFD\uFFFD', - name: 'swapped surrogate pair' - }, -]; - -bad.forEach((t) => { - assert.throws( - () => { - new TextDecoder(t.encoding, { fatal: true }) - .decode(new Uint8Array(t.input)); - }, { - code: 'ERR_ENCODING_INVALID_ENCODED_DATA', - name: 'TypeError' - } - ); -}); diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-domainto.js b/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-domainto.js deleted file mode 100644 index 225f8a05c9..0000000000 --- 
a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-domainto.js +++ /dev/null @@ -1,64 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// Tests below are not from WPT. - -const common = require('../common'); - -if (!common.hasIntl) - common.skip('missing Intl'); - -const assert = require('assert'); -const { domainToASCII, domainToUnicode } = require('url'); - -const tests = require('../fixtures/url-idna'); -const fixtures = require('../common/fixtures'); -const wptToASCIITests = require( - fixtures.path('wpt', 'url', 'resources', 'toascii.json') -); - -{ - const expectedError = { code: 'ERR_MISSING_ARGS', name: 'TypeError' }; - assert.throws(() => domainToASCII(), expectedError); - assert.throws(() => domainToUnicode(), expectedError); - assert.strictEqual(domainToASCII(undefined), 'undefined'); - assert.strictEqual(domainToUnicode(undefined), 'undefined'); -} - -{ - for (const [i, { ascii, unicode }] of tests.entries()) { - assert.strictEqual(ascii, domainToASCII(unicode), - `domainToASCII(${i + 1})`); - assert.strictEqual(unicode, domainToUnicode(ascii), - `domainToUnicode(${i + 1})`); - assert.strictEqual(ascii, domainToASCII(domainToUnicode(ascii)), - `domainToASCII(domainToUnicode(${i + 1}))`); - assert.strictEqual(unicode, domainToUnicode(domainToASCII(unicode)), - `domainToUnicode(domainToASCII(${i + 1}))`); - } -} - -{ - for (const [i, test] of wptToASCIITests.entries()) { - if (typeof test === 'string') - continue; // skip comments - const { comment, input, output } = test; - let caseComment = `Case ${i + 1}`; - if (comment) - caseComment += ` (${comment})`; - if (output === null) { - assert.strictEqual(domainToASCII(input), '', caseComment); - assert.strictEqual(domainToUnicode(input), '', caseComment); - } 
else { - assert.strictEqual(domainToASCII(input), output, caseComment); - const roundtripped = domainToASCII(domainToUnicode(input)); - assert.strictEqual(roundtripped, output, caseComment); - } - } -} diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-inspect.js b/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-inspect.js deleted file mode 100644 index 7a92d5ea3e..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-inspect.js +++ /dev/null @@ -1,75 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// Tests below are not from WPT. - -const common = require('../common'); -if (!common.hasIntl) { - // A handful of the tests fail when ICU is not included. - common.skip('missing Intl'); -} - -const util = require('util'); -const assert = require('assert'); - -const url = new URL('https://username:password@host.name:8080/path/name/?que=ry#hash'); - -assert.strictEqual( - util.inspect(url), - `URL { - href: 'https://username:password@host.name:8080/path/name/?que=ry#hash', - origin: 'https://host.name:8080', - protocol: 'https:', - username: 'username', - password: 'password', - host: 'host.name:8080', - hostname: 'host.name', - port: '8080', - pathname: '/path/name/', - search: '?que=ry', - searchParams: URLSearchParams { 'que' => 'ry' }, - hash: '#hash' -}`); - -assert.strictEqual( - util.inspect(url, { showHidden: true }), - `URL { - href: 'https://username:password@host.name:8080/path/name/?que=ry#hash', - origin: 'https://host.name:8080', - protocol: 'https:', - username: 'username', - password: 'password', - host: 'host.name:8080', - hostname: 'host.name', - port: '8080', - pathname: '/path/name/', - search: '?que=ry', - searchParams: URLSearchParams { 'que' => 'ry' }, - hash: 
'#hash', - cannotBeBase: false, - special: true, - [Symbol(context)]: URLContext { - flags: 2032, - scheme: 'https:', - username: 'username', - password: 'password', - host: 'host.name', - port: 8080, - path: [ 'path', 'name', '', [length]: 3 ], - query: 'que=ry', - fragment: 'hash' - } -}`); - -assert.strictEqual( - util.inspect({ a: url }, { depth: 0 }), - '{ a: [URL] }'); - -class MyURL extends URL {} -assert(util.inspect(new MyURL(url.href)).startsWith('MyURL {')); diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-parsing.js b/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-parsing.js deleted file mode 100644 index 7af0759566..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-parsing.js +++ /dev/null @@ -1,87 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// Tests below are not from WPT. - -const common = require('../common'); -if (!common.hasIntl) { - // A handful of the tests fail when ICU is not included. - common.skip('missing Intl'); -} - -const assert = require('assert'); -const fixtures = require('../common/fixtures'); - -const tests = require( - fixtures.path('wpt', 'url', 'resources', 'urltestdata.json') -); - -const originalFailures = tests.filter((test) => test.failure); - -const typeFailures = [ - { input: '' }, - { input: 'test' }, - { input: undefined }, - { input: 0 }, - { input: true }, - { input: false }, - { input: null }, - { input: new Date() }, - { input: new RegExp() }, - { input: 'test', base: null }, - { input: 'http://nodejs.org', base: null }, - { input: () => {} }, -]; - -// See https://github.com/w3c/web-platform-tests/pull/10955 -// > If `failure` is true, parsing `about:blank` against `base` -// > must give failure. 
This tests that the logic for converting -// > base URLs into strings properly fails the whole parsing -// > algorithm if the base URL cannot be parsed. -const aboutBlankFailures = originalFailures - .map((test) => ({ - input: 'about:blank', - base: test.input, - failure: true - })); - -const failureTests = originalFailures - .concat(typeFailures) - .concat(aboutBlankFailures); - -const expectedError = { code: 'ERR_INVALID_URL', name: 'TypeError' }; - -for (const test of failureTests) { - assert.throws( - () => new URL(test.input, test.base), - (error) => { - assert.throws(() => { throw error; }, expectedError); - assert.strictEqual(`${error}`, 'TypeError [ERR_INVALID_URL]: Invalid URL'); - assert.strictEqual(error.message, 'Invalid URL'); - return true; - }); -} - -const additional_tests = - require(fixtures.path('url-tests-additional.js')); - -for (const test of additional_tests) { - const url = new URL(test.url); - if (test.href) assert.strictEqual(url.href, test.href); - if (test.origin) assert.strictEqual(url.origin, test.origin); - if (test.protocol) assert.strictEqual(url.protocol, test.protocol); - if (test.username) assert.strictEqual(url.username, test.username); - if (test.password) assert.strictEqual(url.password, test.password); - if (test.hostname) assert.strictEqual(url.hostname, test.hostname); - if (test.host) assert.strictEqual(url.host, test.host); - if (test.port !== undefined) assert.strictEqual(url.port, test.port); - if (test.pathname) assert.strictEqual(url.pathname, test.pathname); - if (test.search) assert.strictEqual(url.search, test.search); - if (test.hash) assert.strictEqual(url.hash, test.hash); -} diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-setters.js b/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-setters.js deleted file mode 100644 index c0b4e41bdc..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-url-custom-setters.js +++ /dev/null @@ -1,67 +0,0 @@ -// deno-fmt-ignore-file 
-// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; - -// Tests below are not from WPT. - -const common = require('../common'); -if (!common.hasIntl) { - // A handful of the tests fail when ICU is not included. - common.skip('missing Intl'); -} - -const assert = require('assert'); -const { test, assert_equals } = require('../common/wpt').harness; -const fixtures = require('../common/fixtures'); - -// TODO(joyeecheung): we should submit these to the upstream -const additionalTestCases = - require(fixtures.path('url-setter-tests-additional.js')); - -{ - for (const attributeToBeSet in additionalTestCases) { - if (attributeToBeSet === 'comment') { - continue; - } - const testCases = additionalTestCases[attributeToBeSet]; - for (const testCase of testCases) { - let name = `Setting <${testCase.href}>.${attributeToBeSet}` + - ` = "${testCase.new_value}"`; - if ('comment' in testCase) { - name += ` ${testCase.comment}`; - } - test(function() { - const url = new URL(testCase.href); - url[attributeToBeSet] = testCase.new_value; - for (const attribute in testCase.expected) { - assert_equals(url[attribute], testCase.expected[attribute]); - } - }, `URL: ${name}`); - } - } -} - -{ - const url = new URL('http://example.com/'); - const obj = { - toString() { throw new Error('toString'); }, - valueOf() { throw new Error('valueOf'); } - }; - const sym = Symbol(); - const props = Object.getOwnPropertyDescriptors(Object.getPrototypeOf(url)); - for (const [name, { set }] of Object.entries(props)) { - if (set) { - assert.throws(() => url[name] = obj, - /^Error: toString$/, - `url.${name} = { toString() { throw ... 
} }`); - assert.throws(() => url[name] = sym, - /^TypeError: Cannot convert a Symbol value to a string$/, - `url.${name} = ${String(sym)}`); - } - } -} diff --git a/cli/tests/node_compat/test/parallel/test-whatwg-url-toascii.js b/cli/tests/node_compat/test/parallel/test-whatwg-url-toascii.js deleted file mode 100644 index 82ac527f1f..0000000000 --- a/cli/tests/node_compat/test/parallel/test-whatwg-url-toascii.js +++ /dev/null @@ -1,93 +0,0 @@ -// deno-fmt-ignore-file -// deno-lint-ignore-file - -// Copyright Joyent and Node contributors. All rights reserved. MIT license. -// Taken from Node 18.12.1 -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually - -'use strict'; -const common = require('../common'); -if (!common.hasIntl) { - // A handful of the tests fail when ICU is not included. - common.skip('missing Intl'); -} - -const fixtures = require('../common/fixtures'); -const { test, assert_equals, assert_throws } = require('../common/wpt').harness; - -const request = { - response: require( - fixtures.path('wpt', 'url', 'resources', 'toascii.json') - ) -}; - -// The following tests are copied from WPT. Modifications to them should be -// upstreamed first. 
-// Refs: https://github.com/w3c/web-platform-tests/blob/4839a0a804/url/toascii.window.js -// License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html - -/* eslint-disable */ -// async_test(t => { -// const request = new XMLHttpRequest() -// request.open("GET", "toascii.json") -// request.send() -// request.responseType = "json" -// request.onload = t.step_func_done(() => { - runTests(request.response) -// }) -// }, "Loading data…") - -function makeURL(type, input) { - input = "https://" + input + "/x" - if(type === "url") { - return new URL(input) - } else { - const url = document.createElement(type) - url.href = input - return url - } -} - -function runTests(tests) { - for(var i = 0, l = tests.length; i < l; i++) { - let hostTest = tests[i] - if (typeof hostTest === "string") { - continue // skip comments - } - const typeName = { "url": "URL", "a": "", "area": "" } - // ;["url", "a", "area"].forEach((type) => { - ;["url"].forEach((type) => { - test(() => { - if(hostTest.output !== null) { - const url = makeURL("url", hostTest.input) - assert_equals(url.host, hostTest.output) - assert_equals(url.hostname, hostTest.output) - assert_equals(url.pathname, "/x") - assert_equals(url.href, "https://" + hostTest.output + "/x") - } else { - if(type === "url") { - assert_throws(new TypeError, () => makeURL("url", hostTest.input)) - } else { - const url = makeURL(type, hostTest.input) - assert_equals(url.host, "") - assert_equals(url.hostname, "") - assert_equals(url.pathname, "") - assert_equals(url.href, "https://" + hostTest.input + "/x") - } - } - }, hostTest.input + " (using " + typeName[type] + ")") - ;["host", "hostname"].forEach((val) => { - test(() => { - const url = makeURL(type, "x") - url[val] = hostTest.input - if(hostTest.output !== null) { - assert_equals(url[val], hostTest.output) - } else { - assert_equals(url[val], "x") - } - }, hostTest.input + " (using " + typeName[type] + "." 
+ val + ")") - }) - }) - } -} -/* eslint-enable */ diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index 0aae01edcd..650cd4b165 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. -Total: 2924 +Total: 2934 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -2371,7 +2371,9 @@ Total: 2924 - [parallel/test-ttywrap-stack.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-ttywrap-stack.js) - [parallel/test-unhandled-exception-rethrow-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-unhandled-exception-rethrow-error.js) - [parallel/test-unicode-node-options.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-unicode-node-options.js) +- [parallel/test-url-format-whatwg.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-url-format-whatwg.js) - [parallel/test-url-null-char.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-url-null-char.js) +- [parallel/test-url-parse-format.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-url-parse-format.js) - [parallel/test-utf8-scripts.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-utf8-scripts.js) - [parallel/test-util-callbackify.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-util-callbackify.js) - [parallel/test-util-emit-experimental-warning.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-util-emit-experimental-warning.js) @@ -2496,10 +2498,13 @@ Total: 2924 - [parallel/test-webcrypto-wrap-unwrap.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webcrypto-wrap-unwrap.js) 
- [parallel/test-webstream-encoding-inspect.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webstream-encoding-inspect.js) - [parallel/test-webstream-readablestream-pipeto.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-webstream-readablestream-pipeto.js) +- [parallel/test-whatwg-encoding-custom-fatal-streaming.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js) - [parallel/test-whatwg-encoding-custom-internals.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-internals.js) - [parallel/test-whatwg-encoding-custom-interop.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-interop.js) - [parallel/test-whatwg-encoding-custom-textdecoder-api-invalid-label.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-api-invalid-label.js) +- [parallel/test-whatwg-encoding-custom-textdecoder-fatal.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js) - [parallel/test-whatwg-encoding-custom-textdecoder-invalid-arg.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-invalid-arg.js) +- [parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js) - [parallel/test-whatwg-encoding-custom-textdecoder.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-encoding-custom-textdecoder.js) - [parallel/test-whatwg-events-event-constructors.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-events-event-constructors.js) - 
[parallel/test-whatwg-events-eventtarget-this-of-listener.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-events-eventtarget-this-of-listener.js) @@ -2507,6 +2512,9 @@ Total: 2924 - [parallel/test-whatwg-readablebytestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-readablebytestream.js) - [parallel/test-whatwg-readablestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-readablestream.js) - [parallel/test-whatwg-transformstream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-transformstream.js) +- [parallel/test-whatwg-url-custom-domainto.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-domainto.js) +- [parallel/test-whatwg-url-custom-inspect.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-inspect.js) +- [parallel/test-whatwg-url-custom-parsing.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-parsing.js) - [parallel/test-whatwg-url-custom-properties.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-properties.js) - [parallel/test-whatwg-url-custom-searchparams-append.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-append.js) - [parallel/test-whatwg-url-custom-searchparams-constructor.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-constructor.js) @@ -2523,7 +2531,9 @@ Total: 2924 - [parallel/test-whatwg-url-custom-searchparams-stringifier.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js) - [parallel/test-whatwg-url-custom-searchparams-values.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams-values.js) - 
[parallel/test-whatwg-url-custom-searchparams.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-searchparams.js) +- [parallel/test-whatwg-url-custom-setters.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-custom-setters.js) - [parallel/test-whatwg-url-invalidthis.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-invalidthis.js) +- [parallel/test-whatwg-url-toascii.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-url-toascii.js) - [parallel/test-whatwg-webstreams-adapters-streambase.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-streambase.js) - [parallel/test-whatwg-webstreams-adapters-to-readablestream.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-to-readablestream.js) - [parallel/test-whatwg-webstreams-adapters-to-readablewritablepair.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-whatwg-webstreams-adapters-to-readablewritablepair.js) From cf6f965e2506a24d345bd8b1d8c20390f3f246fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 13 May 2023 21:11:40 +0200 Subject: [PATCH 167/320] chore: upgrade rusty_v8 to 0.71.2 (#19116) Closes https://github.com/denoland/deno/issues/19021 --- .github/workflows/ci.generate.ts | 4 +-- .github/workflows/ci.yml | 6 ++-- Cargo.lock | 4 +-- Cargo.toml | 2 +- tools/deno.lock.json | 47 ++++++++++++++++++++++++++++++++ 5 files changed, 55 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index f5db4f499e..50c7b54b7b 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -17,7 +17,7 @@ const Runners = (() => { })(); // bump the number at the start when you want to purge the cache const prCacheKeyPrefix = - "25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; + 
"26-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-"; const installPkgsCommand = "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; @@ -476,7 +476,7 @@ const ci = { "~/.cargo/git/db", ].join("\n"), key: - "25-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", + "26-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}", }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b3b76647f7..c266f8f40a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,7 +290,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '25-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '26-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -302,7 +302,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '26-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -583,7 +583,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '25-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '26-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index b2ef8559d4..4b46e4b653 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5703,9 +5703,9 @@ dependencies = [ [[package]] name = "v8" -version = "0.71.1" +version = "0.71.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"32a2ece81e9f3d573376d5301b0d1c1c0ffcb63d57e6164ddf1bc844b4c8a23b" +checksum = "1a4bbfd886a9c2f87170438c0cdb6b1ddbfe80412ab591c83d24c7e48e487313" dependencies = [ "bitflags 1.3.2", "fslock", diff --git a/Cargo.toml b/Cargo.toml index fc31a15262..6f4d15b918 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -v8 = { version = "0.71.1", default-features = false } +v8 = { version = "0.71.2", default-features = false } deno_ast = { version = "0.26.0", features = ["transpiling"] } deno_core = { version = "0.186.0", path = "./core" } diff --git a/tools/deno.lock.json b/tools/deno.lock.json index e28ff3c71b..4b5df5c920 100644 --- a/tools/deno.lock.json +++ b/tools/deno.lock.json @@ -108,6 +108,53 @@ "https://deno.land/std@0.108.0/path/win32.ts": "2edb2f71f10578ee1168de01a8cbd3c65483e45a46bc2fa3156a0c6bfbd2720d", "https://deno.land/std@0.108.0/testing/_diff.ts": "ccd6c3af6e44c74bf1591acb1361995f5f50df64323a6e7fb3f16c8ea792c940", "https://deno.land/std@0.108.0/testing/asserts.ts": "6b0d6ba564bdff807bd0f0e93e02c48aa3177acf19416bf84a7f420191ef74cd", + "https://deno.land/std@0.173.0/_util/asserts.ts": "178dfc49a464aee693a7e285567b3d0b555dc805ff490505a8aae34f9cfb1462", + "https://deno.land/std@0.173.0/bytes/bytes_list.ts": "b4cbdfd2c263a13e8a904b12d082f6177ea97d9297274a4be134e989450dfa6a", + "https://deno.land/std@0.173.0/bytes/concat.ts": "d26d6f3d7922e6d663dacfcd357563b7bf4a380ce5b9c2bbe0c8586662f25ce2", + "https://deno.land/std@0.173.0/bytes/copy.ts": "939d89e302a9761dcf1d9c937c7711174ed74c59eef40a1e4569a05c9de88219", + "https://deno.land/std@0.173.0/encoding/_yaml/dumper/dumper.ts": "49053c293a2250b33f2efc0ce3973280c6dc3bc0b41397af3863b5f03340e01b", + "https://deno.land/std@0.173.0/encoding/_yaml/dumper/dumper_state.ts": "975a3702752a29251c5746206507dfebbfede60dd2c0dec161dc22633fbc6085", + "https://deno.land/std@0.173.0/encoding/_yaml/error.ts": 
"e60ab51d7c0253cf0d1cf7d445202e8e3da5c77aae0032071ba7400121c281b4", + "https://deno.land/std@0.173.0/encoding/_yaml/loader/loader.ts": "6c59f60faaf78d73db0e016293f4bfed19e6356d7064230d07d6b68a65a1df5d", + "https://deno.land/std@0.173.0/encoding/_yaml/loader/loader_state.ts": "fcc82fcdf167acb0e9e5e32b32682e58b45f2d44210bf685794797ccb5621232", + "https://deno.land/std@0.173.0/encoding/_yaml/mark.ts": "0027d6f62a70a6c64b85bd1751ddf1646ea97edcefbf5bea1706d5e519f4e34f", + "https://deno.land/std@0.173.0/encoding/_yaml/parse.ts": "63e79582e07145ca1d3205d1ac72b82bf5ce14159dabae195abe7e36de8111bd", + "https://deno.land/std@0.173.0/encoding/_yaml/schema.ts": "0833c75c59bf72c8a8f96f6c0615bcd98d23fdd9b076657f42b5c1a4f9d972b0", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/core.ts": "366f56673336ba24f5723c04319efcc7471be5f55d5f8d95c9b4a38ec233d4c6", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/default.ts": "96e9ed6ead36f53a0832c542fc9b8cca7f8b4a67c1c8424e1423a39ee7154db7", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/extended.ts": "f9bd75c79ebdfb92a8e167488b6bde7113a31b8fabe20ad7eed0904fba11bcd2", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/failsafe.ts": "cddcbf0258bbe0cd77ca10e2f5aec13439f50d4068f96aab08ca2d64496dabe8", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/json.ts": "c86905dfb1b6c4633750bfbb5bd529a30be5c08287ab7eb6694390b40e276487", + "https://deno.land/std@0.173.0/encoding/_yaml/schema/mod.ts": "051f93dd97a15aaad2da62bd24627e8fd2f02fb026d21567d924b720d606f078", + "https://deno.land/std@0.173.0/encoding/_yaml/state.ts": "ef03d55ec235d48dcfbecc0ab3ade90bfae69a61094846e08003421c2cf5cfc6", + "https://deno.land/std@0.173.0/encoding/_yaml/stringify.ts": "426b73e4dbaeed26ed855add3862786d7e374bd4c59e5e1bd9a6fcd5082be3c7", + "https://deno.land/std@0.173.0/encoding/_yaml/type.ts": "5ded5472a0f17a219ac3b0e90d96dc8472a68654a40258a31e03a6c6297b6788", + "https://deno.land/std@0.173.0/encoding/_yaml/type/binary.ts": 
"935d39794420ac3718d26716192239de6a53566c6f2ba5010e8ed26936b94a89", + "https://deno.land/std@0.173.0/encoding/_yaml/type/bool.ts": "1c99cfbaa94b022575b636a73e1549569b26fc6bbff2cd5e539aa77b49bdf303", + "https://deno.land/std@0.173.0/encoding/_yaml/type/float.ts": "f60ad19b27050add694bfc255b7efef27103f047861aa657823ff3f6853bad11", + "https://deno.land/std@0.173.0/encoding/_yaml/type/function.ts": "65a37f6bef43ef141854ee48a1058d9c9c4c80ed6eed6cd35608329a6957e27a", + "https://deno.land/std@0.173.0/encoding/_yaml/type/int.ts": "892f59bb7b2dbd64dd9b643c17441af95c0b962ad027e454cb84a68864787b86", + "https://deno.land/std@0.173.0/encoding/_yaml/type/map.ts": "92e647a6aec0dc184ea4b039a77a15883b54da754311189c595b43f6aaa50030", + "https://deno.land/std@0.173.0/encoding/_yaml/type/merge.ts": "8192bf3e4d637f32567917f48bb276043da9cf729cf594e5ec191f7cd229337e", + "https://deno.land/std@0.173.0/encoding/_yaml/type/mod.ts": "060e2b3d38725094b77ea3a3f05fc7e671fced8e67ca18e525be98c4aa8f4bbb", + "https://deno.land/std@0.173.0/encoding/_yaml/type/nil.ts": "606e8f0c44d73117c81abec822f89ef81e40f712258c74f186baa1af659b8887", + "https://deno.land/std@0.173.0/encoding/_yaml/type/omap.ts": "fbd5da9970c211335ff7c8fa11e9c5e9256e568d52418ac237d1538c5cb0d5e6", + "https://deno.land/std@0.173.0/encoding/_yaml/type/pairs.ts": "ea487a44c0ae64d8d952779fa1cb5fa0a12f32a0b5d3d1e8c1f06f446448427c", + "https://deno.land/std@0.173.0/encoding/_yaml/type/regexp.ts": "672000d22a1062d61577d30b218c28f5cb1d039a7a60079fdde6a4e558d5ca51", + "https://deno.land/std@0.173.0/encoding/_yaml/type/seq.ts": "39b28f7c7aa41263c5c42cab9d184f03555e9ba19493766afc0c0c325a9ac49f", + "https://deno.land/std@0.173.0/encoding/_yaml/type/set.ts": "0e30a9f750306b514c8ae9869d1ac2548d57beab55b33e85ea9673ca0a08264c", + "https://deno.land/std@0.173.0/encoding/_yaml/type/str.ts": "a67a3c6e429d95041399e964015511779b1130ea5889fa257c48457bd3446e31", + "https://deno.land/std@0.173.0/encoding/_yaml/type/timestamp.ts": 
"706ea80a76a73e48efaeb400ace087da1f927647b53ad6f754f4e06d51af087f", + "https://deno.land/std@0.173.0/encoding/_yaml/type/undefined.ts": "94a316ca450597ccbc6750cbd79097ad0d5f3a019797eed3c841a040c29540ba", + "https://deno.land/std@0.173.0/encoding/_yaml/utils.ts": "26b311f0d42a7ce025060bd6320a68b50e52fd24a839581eb31734cd48e20393", + "https://deno.land/std@0.173.0/encoding/yaml.ts": "02571d1bbbcfd7c5647789cee872ecf9c1c470e1b1a40948ed219fb661e19d87", + "https://deno.land/std@0.173.0/io/buf_reader.ts": "90a7adcb3638d8e1361695cdf844d58bcd97c41711dc6f9f8acc0626ebe097f5", + "https://deno.land/std@0.173.0/io/buf_writer.ts": "759c69d304b04d2909976f2a03a24a107276fbd81ed13593c5c2d43d104b52f3", + "https://deno.land/std@0.173.0/io/buffer.ts": "24abd4a65403ca3fdffcb6d3f985b0285adfd785f1311ce681708a21126776ad", + "https://deno.land/std@0.173.0/io/read_delim.ts": "7e102c66f00a118fa1e1ccd4abb080496f43766686907fd8b9522fdf85443586", + "https://deno.land/std@0.173.0/io/read_lines.ts": "baee9e35034f2fdfccf63bc24b7e3cb45aa1c1c5de26d178f7bcbc572e87772f", + "https://deno.land/std@0.173.0/io/read_string_delim.ts": "46eb0c9db3547caf8c759631effa200bbe48924f9b34f41edc627bde36cee52d", + "https://deno.land/std@0.173.0/types.d.ts": "220ed56662a0bd393ba5d124aa6ae2ad36a00d2fcbc0e8666a65f4606aaa9784", "https://deno.land/std@0.179.0/_util/asserts.ts": "178dfc49a464aee693a7e285567b3d0b555dc805ff490505a8aae34f9cfb1462", "https://deno.land/std@0.179.0/_util/os.ts": "d932f56d41e4f6a6093d56044e29ce637f8dcc43c5a90af43504a889cf1775e3", "https://deno.land/std@0.179.0/bytes/copy.ts": "939d89e302a9761dcf1d9c937c7711174ed74c59eef40a1e4569a05c9de88219", From 226a373c497cc3439e032332fefaffa75211afbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 13 May 2023 21:42:34 +0200 Subject: [PATCH 168/320] fix(lsp): increase default max heap size to 3Gb (#19115) --- cli/main.rs | 8 +++++++- cli/standalone/mod.rs | 2 +- cli/util/v8.rs | 15 ++++++++++++--- 3 files changed, 20 insertions(+), 5 
deletions(-) diff --git a/cli/main.rs b/cli/main.rs index 03e7cf41e7..75425cf105 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -281,7 +281,13 @@ pub fn main() { Err(err) => unwrap_or_exit(Err(AnyError::from(err))), }; - init_v8_flags(&flags.v8_flags, get_v8_flags_from_env()); + let default_v8_flags = match flags.subcommand { + // Using same default as VSCode: + // https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214 + DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()], + _ => vec![], + }; + init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env()); util::logger::init(flags.log_level); diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index db2743be86..eb5cf5d828 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -430,7 +430,7 @@ pub async fn run( }, ); - v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![])); + v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![])); let mut worker = worker_factory .create_main_worker(main_module.clone(), permissions) diff --git a/cli/util/v8.rs b/cli/util/v8.rs index 6afaf285e3..93a2ef83f7 100644 --- a/cli/util/v8.rs +++ b/cli/util/v8.rs @@ -10,17 +10,26 @@ pub fn get_v8_flags_from_env() -> Vec { #[inline(always)] pub fn construct_v8_flags( + default_v8_flags: &[String], v8_flags: &[String], env_v8_flags: Vec, ) -> Vec { std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned()) + .chain(default_v8_flags.iter().cloned()) .chain(env_v8_flags.into_iter()) .chain(v8_flags.iter().cloned()) .collect::>() } -pub fn init_v8_flags(v8_flags: &[String], env_v8_flags: Vec) { - if v8_flags.is_empty() && env_v8_flags.is_empty() { +pub fn init_v8_flags( + default_v8_flags: &[String], + v8_flags: &[String], + env_v8_flags: Vec, +) { + if default_v8_flags.is_empty() + && v8_flags.is_empty() + && env_v8_flags.is_empty() + { return; } @@ -29,7 +38,7 @@ pub fn 
init_v8_flags(v8_flags: &[String], env_v8_flags: Vec) { .chain(v8_flags) .any(|flag| flag == "-help" || flag == "--help"); // Keep in sync with `standalone.rs`. - let v8_flags = construct_v8_flags(v8_flags, env_v8_flags); + let v8_flags = construct_v8_flags(default_v8_flags, v8_flags, env_v8_flags); let unrecognized_v8_flags = deno_core::v8_set_flags(v8_flags) .into_iter() .skip(1) From 6bea6b31d3b502165e7e890c889919d67df1a795 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Sun, 14 May 2023 20:27:14 +0900 Subject: [PATCH 169/320] fix(dts): move BroadcastChannel type to lib.deno.unstable.d.ts (#19108) --- cli/tests/integration/check_tests.rs | 11 +++++++++++ cli/tests/integration/lsp_tests.rs | 2 +- cli/tests/testdata/check/broadcast_channel.ts | 1 + .../testdata/check/broadcast_channel.ts.error.out | 4 ++++ cli/tsc/dts/lib.deno.shared_globals.d.ts | 1 - cli/tsc/dts/lib.deno.unstable.d.ts | 1 + 6 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 cli/tests/testdata/check/broadcast_channel.ts create mode 100644 cli/tests/testdata/check/broadcast_channel.ts.error.out diff --git a/cli/tests/integration/check_tests.rs b/cli/tests/integration/check_tests.rs index 84ddd53be8..84ed929906 100644 --- a/cli/tests/integration/check_tests.rs +++ b/cli/tests/integration/check_tests.rs @@ -84,6 +84,17 @@ itest!(check_no_error_truncation { exit_code: 1, }); +itest!(check_broadcast_channel_stable { + args: "check --quiet check/broadcast_channel.ts", + output: "check/broadcast_channel.ts.error.out", + exit_code: 1, +}); + +itest!(check_broadcast_channel_unstable { + args: "check --quiet --unstable check/broadcast_channel.ts", + exit_code: 0, +}); + #[test] fn cache_switching_config_then_no_config() { let context = TestContext::default(); diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs index 500a27ed2c..656ec9ade7 100644 --- a/cli/tests/integration/lsp_tests.rs +++ b/cli/tests/integration/lsp_tests.rs @@ -4713,7 +4713,7 @@ fn 
lsp_completions_auto_import() { "source": "./b.ts", "data": { "exportName": "foo", - "exportMapKey": "foo|6845|file:///a/b", + "exportMapKey": "foo|6806|file:///a/b", "moduleSpecifier": "./b.ts", "fileName": "file:///a/b.ts" }, diff --git a/cli/tests/testdata/check/broadcast_channel.ts b/cli/tests/testdata/check/broadcast_channel.ts new file mode 100644 index 0000000000..6c75b4a8e1 --- /dev/null +++ b/cli/tests/testdata/check/broadcast_channel.ts @@ -0,0 +1 @@ +const _channel = new BroadcastChannel("foo"); diff --git a/cli/tests/testdata/check/broadcast_channel.ts.error.out b/cli/tests/testdata/check/broadcast_channel.ts.error.out new file mode 100644 index 0000000000..baa6727e3e --- /dev/null +++ b/cli/tests/testdata/check/broadcast_channel.ts.error.out @@ -0,0 +1,4 @@ +error: TS2304 [ERROR]: Cannot find name 'BroadcastChannel'. +const _channel = new BroadcastChannel("foo"); + ~~~~~~~~~~~~~~~~ + at [WILDCARD] diff --git a/cli/tsc/dts/lib.deno.shared_globals.d.ts b/cli/tsc/dts/lib.deno.shared_globals.d.ts index d0b44f58af..69850f0b2d 100644 --- a/cli/tsc/dts/lib.deno.shared_globals.d.ts +++ b/cli/tsc/dts/lib.deno.shared_globals.d.ts @@ -11,7 +11,6 @@ /// /// /// -/// /** @category WebAssembly */ declare namespace WebAssembly { diff --git a/cli/tsc/dts/lib.deno.unstable.d.ts b/cli/tsc/dts/lib.deno.unstable.d.ts index 8613da2ab5..e70d508200 100644 --- a/cli/tsc/dts/lib.deno.unstable.d.ts +++ b/cli/tsc/dts/lib.deno.unstable.d.ts @@ -2,6 +2,7 @@ /// /// +/// declare namespace Deno { export {}; // stop default export type behavior From b99159bf14d15418a7dbb22e9ce78b15d52971cc Mon Sep 17 00:00:00 2001 From: yzy-1 <50034950+yzy-1@users.noreply.github.com> Date: Sun, 14 May 2023 19:55:26 +0000 Subject: [PATCH 170/320] fix(runtime): Example hello_runtime panic (#19125) After commit f34fcd, running example will panic because esm_entry_point is not set. 
Closes https://github.com/denoland/deno/issues/19127 --- runtime/examples/hello_runtime.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs index 2bc371b680..d4e39dd2db 100644 --- a/runtime/examples/hello_runtime.rs +++ b/runtime/examples/hello_runtime.rs @@ -9,7 +9,11 @@ use deno_runtime::worker::WorkerOptions; use std::path::Path; use std::rc::Rc; -deno_core::extension!(hello_runtime, esm = ["hello_runtime_bootstrap.js"]); +deno_core::extension!( + hello_runtime, + esm_entry_point = "ext:hello_runtime/hello_runtime_bootstrap.js", + esm = ["hello_runtime_bootstrap.js"] +); #[tokio::main] async fn main() -> Result<(), AnyError> { From 9845361153f35f6a68a82eb3a13845fddbeab026 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Sun, 14 May 2023 15:40:01 -0600 Subject: [PATCH 171/320] refactor(core): bake single-thread assumptions into spawn/spawn_blocking (#19056) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Partially supersedes #19016. This migrates `spawn` and `spawn_blocking` to `deno_core`, and removes the requirement for `spawn` tasks to be `Send` given our single-threaded executor. While we don't need to technically do anything w/`spawn_blocking`, this allows us to have a single `JoinHandle` type that works for both cases, and allows us to more easily experiment with alternative `spawn_blocking` implementations that do not require tokio (ie: rayon). 
Async ops (+~35%): Before: ``` time 1310 ms rate 763358 time 1267 ms rate 789265 time 1259 ms rate 794281 time 1266 ms rate 789889 ``` After: ``` time 956 ms rate 1046025 time 954 ms rate 1048218 time 924 ms rate 1082251 time 920 ms rate 1086956 ``` HTTP serve (+~4.4%): Before: ``` Running 10s test @ http://localhost:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 68.78us 19.77us 1.43ms 86.84% Req/Sec 68.78k 5.00k 73.84k 91.58% 1381833 requests in 10.10s, 167.36MB read Requests/sec: 136823.29 Transfer/sec: 16.57MB ``` After: ``` Running 10s test @ http://localhost:4500 2 threads and 10 connections Thread Stats Avg Stdev Max +/- Stdev Latency 63.12us 17.43us 1.11ms 85.13% Req/Sec 71.82k 3.71k 77.02k 79.21% 1443195 requests in 10.10s, 174.79MB read Requests/sec: 142921.99 Transfer/sec: 17.31MB ``` Suggested-By: alice@ryhl.io Co-authored-by: Bartek Iwańczuk --- cli/cache/cache_db.rs | 5 +- cli/cache/incremental.rs | 5 +- cli/lsp/client.rs | 7 +- cli/lsp/diagnostics.rs | 14 +- cli/lsp/language_server.rs | 3 +- cli/lsp/parent_process_checker.rs | 3 +- cli/lsp/testing/execution.rs | 14 +- cli/main.rs | 4 +- cli/npm/resolvers/common.rs | 3 +- cli/npm/resolvers/local.rs | 5 +- cli/tests/integration/cert_tests.rs | 173 +++++++++++------------ cli/tests/integration/inspector_tests.rs | 2 +- cli/tests/integration/run_tests.rs | 75 +++++----- cli/tools/bench.rs | 14 +- cli/tools/fmt.rs | 3 +- cli/tools/repl/mod.rs | 3 +- cli/tools/task.rs | 14 +- cli/tools/test.rs | 24 ++-- cli/tools/upgrade.rs | 3 +- cli/util/draw_thread.rs | 3 +- cli/util/fs.rs | 3 +- core/Cargo.toml | 2 +- core/lib.rs | 1 + core/task.rs | 131 +++++++++++++++++ core/task_queue.rs | 2 +- ext/cache/sqlite.rs | 13 +- ext/crypto/decrypt.rs | 3 +- ext/crypto/encrypt.rs | 3 +- ext/crypto/generate_key.rs | 3 +- ext/crypto/lib.rs | 3 +- ext/ffi/call.rs | 5 +- ext/fs/std_fs.rs | 43 +++--- ext/http/http_next.rs | 19 ++- ext/http/lib.rs | 8 +- ext/io/lib.rs | 5 +- ext/net/ops_tls.rs | 6 
+- ext/node/ops/crypto/mod.rs | 48 +++---- ext/websocket/lib.rs | 2 +- runtime/inspector_server.rs | 5 +- runtime/tokio_util.rs | 13 +- runtime/web_worker.rs | 4 +- tools/wpt/expectation.json | 4 +- 42 files changed, 415 insertions(+), 288 deletions(-) create mode 100644 core/task.rs diff --git a/cli/cache/cache_db.rs b/cli/cache/cache_db.rs index 90840de1a0..e05ecd962b 100644 --- a/cli/cache/cache_db.rs +++ b/cli/cache/cache_db.rs @@ -3,6 +3,7 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::parking_lot::MutexGuard; +use deno_core::task::spawn_blocking; use deno_runtime::deno_webstorage::rusqlite; use deno_runtime::deno_webstorage::rusqlite::Connection; use deno_runtime::deno_webstorage::rusqlite::OptionalExtension; @@ -95,7 +96,7 @@ impl Drop for CacheDB { // Hand off SQLite connection to another thread to do the surprisingly expensive cleanup let inner = inner.into_inner().into_inner(); if let Some(conn) = inner { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { drop(conn); log::trace!( "Cleaned up SQLite connection at {}", @@ -168,7 +169,7 @@ impl CacheDB { fn spawn_eager_init_thread(&self) { let clone = self.clone(); debug_assert!(tokio::runtime::Handle::try_current().is_ok()); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let lock = clone.conn.lock(); clone.initialize(&lock); }); diff --git a/cli/cache/incremental.rs b/cli/cache/incremental.rs index deb30cdd18..c50b876fa9 100644 --- a/cli/cache/incremental.rs +++ b/cli/cache/incremental.rs @@ -7,9 +7,10 @@ use std::path::PathBuf; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::serde_json; +use deno_core::task::spawn; +use deno_core::task::JoinHandle; use deno_runtime::deno_webstorage::rusqlite::params; use serde::Serialize; -use tokio::task::JoinHandle; use super::cache_db::CacheDB; use super::cache_db::CacheDBConfiguration; @@ -93,7 +94,7 @@ impl IncrementalCacheInner { 
tokio::sync::mpsc::unbounded_channel::(); // sqlite isn't `Sync`, so we do all the updating on a dedicated task - let handle = tokio::task::spawn(async move { + let handle = spawn(async move { while let Some(message) = receiver.recv().await { match message { ReceiverMessage::Update(path, hash) => { diff --git a/cli/lsp/client.rs b/cli/lsp/client.rs index d24d4c2a9e..4923a4585e 100644 --- a/cli/lsp/client.rs +++ b/cli/lsp/client.rs @@ -8,6 +8,7 @@ use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::Value; +use deno_core::task::spawn; use tower_lsp::lsp_types as lsp; use tower_lsp::lsp_types::ConfigurationItem; @@ -56,7 +57,7 @@ impl Client { ) { // do on a task in case the caller currently is in the lsp lock let client = self.0.clone(); - tokio::task::spawn(async move { + spawn(async move { client.send_registry_state_notification(params).await; }); } @@ -64,7 +65,7 @@ impl Client { pub fn send_test_notification(&self, params: TestingNotification) { // do on a task in case the caller currently is in the lsp lock let client = self.0.clone(); - tokio::task::spawn(async move { + spawn(async move { client.send_test_notification(params).await; }); } @@ -77,7 +78,7 @@ impl Client { // do on a task in case the caller currently is in the lsp lock let client = self.0.clone(); let message = message.to_string(); - tokio::task::spawn(async move { + spawn(async move { client.show_message(message_type, message).await; }); } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 0f96a498bd..7b5a30a0ea 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -25,6 +25,8 @@ use deno_core::resolve_url; use deno_core::serde::Deserialize; use deno_core::serde_json; use deno_core::serde_json::json; +use deno_core::task::spawn; +use deno_core::task::JoinHandle; use deno_core::ModuleSpecifier; use deno_graph::Resolution; use deno_graph::ResolutionError; @@ -197,9 +199,9 @@ impl DiagnosticsServer { 
runtime.block_on(async { let mut token = CancellationToken::new(); - let mut ts_handle: Option> = None; - let mut lint_handle: Option> = None; - let mut deps_handle: Option> = None; + let mut ts_handle: Option> = None; + let mut lint_handle: Option> = None; + let mut deps_handle: Option> = None; let diagnostics_publisher = DiagnosticsPublisher::new(client.clone()); loop { @@ -213,7 +215,7 @@ impl DiagnosticsServer { diagnostics_publisher.clear().await; let previous_ts_handle = ts_handle.take(); - ts_handle = Some(tokio::spawn({ + ts_handle = Some(spawn({ let performance = performance.clone(); let diagnostics_publisher = diagnostics_publisher.clone(); let ts_server = ts_server.clone(); @@ -265,7 +267,7 @@ impl DiagnosticsServer { })); let previous_deps_handle = deps_handle.take(); - deps_handle = Some(tokio::spawn({ + deps_handle = Some(spawn({ let performance = performance.clone(); let diagnostics_publisher = diagnostics_publisher.clone(); let token = token.clone(); @@ -293,7 +295,7 @@ impl DiagnosticsServer { })); let previous_lint_handle = lint_handle.take(); - lint_handle = Some(tokio::spawn({ + lint_handle = Some(spawn({ let performance = performance.clone(); let diagnostics_publisher = diagnostics_publisher.clone(); let token = token.clone(); diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index de5cd6f09c..e76ea0040a 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -8,6 +8,7 @@ use deno_core::resolve_url; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::task::spawn; use deno_core::ModuleSpecifier; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; @@ -240,7 +241,7 @@ impl LanguageServer { let cli_options = result.cli_options; let roots = result.roots; let open_docs = result.open_docs; - let handle = tokio::task::spawn_local(async move { + let handle = spawn(async move { create_graph_for_caching(cli_options, roots, open_docs).await 
}); if let Err(err) = handle.await.unwrap() { diff --git a/cli/lsp/parent_process_checker.rs b/cli/lsp/parent_process_checker.rs index 4cc3bcef3b..f83543c04c 100644 --- a/cli/lsp/parent_process_checker.rs +++ b/cli/lsp/parent_process_checker.rs @@ -1,5 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use deno_core::task::spawn; use tokio::time::sleep; use tokio::time::Duration; @@ -7,7 +8,7 @@ use tokio::time::Duration; /// provided process id. Once that process no longer exists /// it will terminate the current process. pub fn start(parent_process_id: u32) { - tokio::task::spawn(async move { + spawn(async move { loop { sleep(Duration::from_secs(30)).await; diff --git a/cli/lsp/testing/execution.rs b/cli/lsp/testing/execution.rs index 4834cd0c9c..ce8c8b5acc 100644 --- a/cli/lsp/testing/execution.rs +++ b/cli/lsp/testing/execution.rs @@ -24,9 +24,11 @@ use deno_core::futures::stream; use deno_core::futures::StreamExt; use deno_core::parking_lot::Mutex; use deno_core::parking_lot::RwLock; +use deno_core::task::spawn; +use deno_core::task::spawn_blocking; use deno_core::ModuleSpecifier; use deno_runtime::permissions::Permissions; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use indexmap::IndexMap; use std::collections::HashMap; use std::collections::HashSet; @@ -284,7 +286,7 @@ impl TestRun { }; let token = self.token.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { if fail_fast_tracker.should_stop() { return Ok(()); } @@ -292,13 +294,13 @@ impl TestRun { let file_result = if token.is_cancelled() { Ok(()) } else { - run_local(test::test_specifier( - &worker_factory, + create_and_run_current_thread(test::test_specifier( + worker_factory, permissions, specifier, sender.clone(), fail_fast_tracker, - &test::TestSpecifierOptions { + test::TestSpecifierOptions { filter, shuffle: None, trace_ops: false, @@ -331,7 +333,7 @@ impl TestRun { )); let handler = 
{ - tokio::task::spawn(async move { + spawn(async move { let earlier = Instant::now(); let mut summary = test::TestSummary::new(); let mut used_only = false; diff --git a/cli/main.rs b/cli/main.rs index 75425cf105..023d5a2084 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -46,7 +46,7 @@ use deno_core::error::AnyError; use deno_core::error::JsError; use deno_runtime::colors; use deno_runtime::fmt_errors::format_js_error; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use factory::CliFactory; use std::env; use std::env::current_exe; @@ -294,7 +294,7 @@ pub fn main() { run_subcommand(flags).await }; - let exit_code = unwrap_or_exit(run_local(future)); + let exit_code = unwrap_or_exit(create_and_run_current_thread(future)); std::process::exit(exit_code); } diff --git a/cli/npm/resolvers/common.rs b/cli/npm/resolvers/common.rs index fc040a7ccb..c91b206cf1 100644 --- a/cli/npm/resolvers/common.rs +++ b/cli/npm/resolvers/common.rs @@ -9,6 +9,7 @@ use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_core::futures; +use deno_core::task::spawn; use deno_core::url::Url; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -71,7 +72,7 @@ pub async fn cache_packages( assert_eq!(package.copy_index, 0); // the caller should not provide any of these let cache = cache.clone(); let registry_url = registry_url.clone(); - let handle = tokio::task::spawn(async move { + let handle = spawn(async move { cache .ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url) .await diff --git a/cli/npm/resolvers/local.rs b/cli/npm/resolvers/local.rs index cd1dc36715..b2ad083576 100644 --- a/cli/npm/resolvers/local.rs +++ b/cli/npm/resolvers/local.rs @@ -18,6 +18,8 @@ use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::task::spawn; +use deno_core::task::JoinHandle; use deno_core::url::Url; 
use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageCacheFolderId; @@ -27,7 +29,6 @@ use deno_runtime::deno_fs; use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodeResolutionMode; use deno_runtime::deno_node::PackageJson; -use tokio::task::JoinHandle; use crate::npm::cache::mixed_case_package_name_encode; use crate::npm::cache::should_sync_download; @@ -277,7 +278,7 @@ async fn sync_resolution_with_fs( let cache = cache.clone(); let registry_url = registry_url.clone(); let package = package.clone(); - let handle = tokio::task::spawn(async move { + let handle = spawn(async move { cache .ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url) .await?; diff --git a/cli/tests/integration/cert_tests.rs b/cli/tests/integration/cert_tests.rs index d3da6d75af..b04f2d35e8 100644 --- a/cli/tests/integration/cert_tests.rs +++ b/cli/tests/integration/cert_tests.rs @@ -11,7 +11,6 @@ use std::process::Command; use std::sync::Arc; use test_util as util; use test_util::TempDir; -use tokio::task::LocalSet; use util::TestContext; itest_flaky!(cafile_url_imports { @@ -219,113 +218,99 @@ fn cafile_bundle_remote_exports() { #[tokio::test] async fn listen_tls_alpn() { - // TLS streams require the presence of an ambient local task set to gracefully - // close dropped connections in the background. 
- LocalSet::new() - .run_until(async { - let mut child = util::deno_cmd() - .current_dir(util::testdata_path()) - .arg("run") - .arg("--unstable") - .arg("--quiet") - .arg("--allow-net") - .arg("--allow-read") - .arg("./cert/listen_tls_alpn.ts") - .arg("4504") - .stdout(std::process::Stdio::piped()) - .spawn() - .unwrap(); - let stdout = child.stdout.as_mut().unwrap(); - let mut msg = [0; 5]; - let read = stdout.read(&mut msg).unwrap(); - assert_eq!(read, 5); - assert_eq!(&msg, b"READY"); + let mut child = util::deno_cmd() + .current_dir(util::testdata_path()) + .arg("run") + .arg("--unstable") + .arg("--quiet") + .arg("--allow-net") + .arg("--allow-read") + .arg("./cert/listen_tls_alpn.ts") + .arg("4504") + .stdout(std::process::Stdio::piped()) + .spawn() + .unwrap(); + let stdout = child.stdout.as_mut().unwrap(); + let mut msg = [0; 5]; + let read = stdout.read(&mut msg).unwrap(); + assert_eq!(read, 5); + assert_eq!(&msg, b"READY"); - let mut reader = &mut BufReader::new(Cursor::new(include_bytes!( - "../testdata/tls/RootCA.crt" - ))); - let certs = rustls_pemfile::certs(&mut reader).unwrap(); - let mut root_store = rustls::RootCertStore::empty(); - root_store.add_parsable_certificates(&certs); - let mut cfg = rustls::ClientConfig::builder() - .with_safe_defaults() - .with_root_certificates(root_store) - .with_no_client_auth(); - cfg.alpn_protocols.push(b"foobar".to_vec()); - let cfg = Arc::new(cfg); + let mut reader = &mut BufReader::new(Cursor::new(include_bytes!( + "../testdata/tls/RootCA.crt" + ))); + let certs = rustls_pemfile::certs(&mut reader).unwrap(); + let mut root_store = rustls::RootCertStore::empty(); + root_store.add_parsable_certificates(&certs); + let mut cfg = rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth(); + cfg.alpn_protocols.push(b"foobar".to_vec()); + let cfg = Arc::new(cfg); - let hostname = rustls::ServerName::try_from("localhost").unwrap(); + let hostname = 
rustls::ServerName::try_from("localhost").unwrap(); - let tcp_stream = tokio::net::TcpStream::connect("localhost:4504") - .await - .unwrap(); - let mut tls_stream = - TlsStream::new_client_side(tcp_stream, cfg, hostname); + let tcp_stream = tokio::net::TcpStream::connect("localhost:4504") + .await + .unwrap(); + let mut tls_stream = TlsStream::new_client_side(tcp_stream, cfg, hostname); - tls_stream.handshake().await.unwrap(); + tls_stream.handshake().await.unwrap(); - let (_, rustls_connection) = tls_stream.get_ref(); - let alpn = rustls_connection.alpn_protocol().unwrap(); - assert_eq!(alpn, b"foobar"); + let (_, rustls_connection) = tls_stream.get_ref(); + let alpn = rustls_connection.alpn_protocol().unwrap(); + assert_eq!(alpn, b"foobar"); - let status = child.wait().unwrap(); - assert!(status.success()); - }) - .await; + let status = child.wait().unwrap(); + assert!(status.success()); } #[tokio::test] async fn listen_tls_alpn_fail() { - // TLS streams require the presence of an ambient local task set to gracefully - // close dropped connections in the background. 
- LocalSet::new() - .run_until(async { - let mut child = util::deno_cmd() - .current_dir(util::testdata_path()) - .arg("run") - .arg("--unstable") - .arg("--quiet") - .arg("--allow-net") - .arg("--allow-read") - .arg("./cert/listen_tls_alpn_fail.ts") - .arg("4505") - .stdout(std::process::Stdio::piped()) - .spawn() - .unwrap(); - let stdout = child.stdout.as_mut().unwrap(); - let mut msg = [0; 5]; - let read = stdout.read(&mut msg).unwrap(); - assert_eq!(read, 5); - assert_eq!(&msg, b"READY"); + let mut child = util::deno_cmd() + .current_dir(util::testdata_path()) + .arg("run") + .arg("--unstable") + .arg("--quiet") + .arg("--allow-net") + .arg("--allow-read") + .arg("./cert/listen_tls_alpn_fail.ts") + .arg("4505") + .stdout(std::process::Stdio::piped()) + .spawn() + .unwrap(); + let stdout = child.stdout.as_mut().unwrap(); + let mut msg = [0; 5]; + let read = stdout.read(&mut msg).unwrap(); + assert_eq!(read, 5); + assert_eq!(&msg, b"READY"); - let mut reader = &mut BufReader::new(Cursor::new(include_bytes!( - "../testdata/tls/RootCA.crt" - ))); - let certs = rustls_pemfile::certs(&mut reader).unwrap(); - let mut root_store = rustls::RootCertStore::empty(); - root_store.add_parsable_certificates(&certs); - let mut cfg = rustls::ClientConfig::builder() - .with_safe_defaults() - .with_root_certificates(root_store) - .with_no_client_auth(); - cfg.alpn_protocols.push(b"boofar".to_vec()); - let cfg = Arc::new(cfg); + let mut reader = &mut BufReader::new(Cursor::new(include_bytes!( + "../testdata/tls/RootCA.crt" + ))); + let certs = rustls_pemfile::certs(&mut reader).unwrap(); + let mut root_store = rustls::RootCertStore::empty(); + root_store.add_parsable_certificates(&certs); + let mut cfg = rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth(); + cfg.alpn_protocols.push(b"boofar".to_vec()); + let cfg = Arc::new(cfg); - let hostname = rustls::ServerName::try_from("localhost").unwrap(); + let hostname = 
rustls::ServerName::try_from("localhost").unwrap(); - let tcp_stream = tokio::net::TcpStream::connect("localhost:4505") - .await - .unwrap(); - let mut tls_stream = - TlsStream::new_client_side(tcp_stream, cfg, hostname); + let tcp_stream = tokio::net::TcpStream::connect("localhost:4505") + .await + .unwrap(); + let mut tls_stream = TlsStream::new_client_side(tcp_stream, cfg, hostname); - tls_stream.handshake().await.unwrap_err(); + tls_stream.handshake().await.unwrap_err(); - let (_, rustls_connection) = tls_stream.get_ref(); - assert!(rustls_connection.alpn_protocol().is_none()); + let (_, rustls_connection) = tls_stream.get_ref(); + assert!(rustls_connection.alpn_protocol().is_none()); - let status = child.wait().unwrap(); - assert!(status.success()); - }) - .await; + let status = child.wait().unwrap(); + assert!(status.success()); } diff --git a/cli/tests/integration/inspector_tests.rs b/cli/tests/integration/inspector_tests.rs index cf66c4adc1..8fa9ec85c0 100644 --- a/cli/tests/integration/inspector_tests.rs +++ b/cli/tests/integration/inspector_tests.rs @@ -29,7 +29,7 @@ where Fut::Output: Send + 'static, { fn execute(&self, fut: Fut) { - tokio::task::spawn(fut); + deno_core::task::spawn(fut); } } diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index e6ea85da45..bc717351a0 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -9,7 +9,6 @@ use std::process::Stdio; use std::time::Duration; use test_util as util; use test_util::TempDir; -use tokio::task::LocalSet; use trust_dns_client::serialize::txt::Lexer; use trust_dns_client::serialize::txt::Parser; use util::assert_contains; @@ -3886,50 +3885,44 @@ async fn test_resolve_dns() { #[tokio::test] async fn http2_request_url() { - // TLS streams require the presence of an ambient local task set to gracefully - // close dropped connections in the background. 
- LocalSet::new() - .run_until(async { - let mut child = util::deno_cmd() - .current_dir(util::testdata_path()) - .arg("run") - .arg("--unstable") - .arg("--quiet") - .arg("--allow-net") - .arg("--allow-read") - .arg("./run/http2_request_url.ts") - .arg("4506") - .stdout(std::process::Stdio::piped()) - .spawn() - .unwrap(); - let stdout = child.stdout.as_mut().unwrap(); - let mut buffer = [0; 5]; - let read = stdout.read(&mut buffer).unwrap(); - assert_eq!(read, 5); - let msg = std::str::from_utf8(&buffer).unwrap(); - assert_eq!(msg, "READY"); + let mut child = util::deno_cmd() + .current_dir(util::testdata_path()) + .arg("run") + .arg("--unstable") + .arg("--quiet") + .arg("--allow-net") + .arg("--allow-read") + .arg("./run/http2_request_url.ts") + .arg("4506") + .stdout(std::process::Stdio::piped()) + .spawn() + .unwrap(); + let stdout = child.stdout.as_mut().unwrap(); + let mut buffer = [0; 5]; + let read = stdout.read(&mut buffer).unwrap(); + assert_eq!(read, 5); + let msg = std::str::from_utf8(&buffer).unwrap(); + assert_eq!(msg, "READY"); - let cert = reqwest::Certificate::from_pem(include_bytes!( - "../testdata/tls/RootCA.crt" - )) - .unwrap(); + let cert = reqwest::Certificate::from_pem(include_bytes!( + "../testdata/tls/RootCA.crt" + )) + .unwrap(); - let client = reqwest::Client::builder() - .add_root_certificate(cert) - .http2_prior_knowledge() - .build() - .unwrap(); + let client = reqwest::Client::builder() + .add_root_certificate(cert) + .http2_prior_knowledge() + .build() + .unwrap(); - let res = client.get("http://127.0.0.1:4506").send().await.unwrap(); - assert_eq!(200, res.status()); + let res = client.get("http://127.0.0.1:4506").send().await.unwrap(); + assert_eq!(200, res.status()); - let body = res.text().await.unwrap(); - assert_eq!(body, "http://127.0.0.1:4506/"); + let body = res.text().await.unwrap(); + assert_eq!(body, "http://127.0.0.1:4506/"); - child.kill().unwrap(); - child.wait().unwrap(); - }) - .await; + child.kill().unwrap(); + 
child.wait().unwrap(); } #[cfg(not(windows))] @@ -4173,7 +4166,7 @@ where Fut::Output: Send + 'static, { fn execute(&self, fut: Fut) { - tokio::task::spawn(fut); + deno_core::task::spawn(fut); } } diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 3d5f99aba4..107fd2b9b0 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -27,11 +27,13 @@ use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::located_script_name; use deno_core::serde_v8; +use deno_core::task::spawn; +use deno_core::task::spawn_blocking; use deno_core::v8; use deno_core::ModuleSpecifier; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use indexmap::IndexMap; use indexmap::IndexSet; use log::Level; @@ -436,7 +438,7 @@ async fn check_specifiers( /// Run a single specifier as an executable bench module. async fn bench_specifier( - worker_factory: &CliMainWorkerFactory, + worker_factory: Arc, permissions: Permissions, specifier: ModuleSpecifier, sender: UnboundedSender, @@ -522,15 +524,15 @@ async fn bench_specifiers( let specifier = specifier; let sender = sender.clone(); let options = option_for_handles.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let future = bench_specifier( - &worker_factory, + worker_factory, permissions, specifier, sender, options.filter, ); - run_local(future) + create_and_run_current_thread(future) }) }); @@ -539,7 +541,7 @@ async fn bench_specifiers( .collect::, tokio::task::JoinError>>>(); let handler = { - tokio::task::spawn(async move { + spawn(async move { let mut used_only = false; let mut report = BenchReport::new(); let mut reporter = diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 70d2bd6395..f2fec93023 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -28,6 +28,7 @@ use deno_core::error::generic_error; use 
deno_core::error::AnyError; use deno_core::futures; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use log::debug; use log::info; use log::warn; @@ -629,7 +630,7 @@ where let handles = file_paths.iter().map(|file_path| { let f = f.clone(); let file_path = file_path.clone(); - tokio::task::spawn_blocking(move || f(file_path)) + spawn_blocking(move || f(file_path)) }); let join_results = futures::future::join_all(handles).await; diff --git a/cli/tools/repl/mod.rs b/cli/tools/repl/mod.rs index 9f4b589196..dfd9931b8d 100644 --- a/cli/tools/repl/mod.rs +++ b/cli/tools/repl/mod.rs @@ -8,6 +8,7 @@ use crate::factory::CliFactory; use crate::file_fetcher::FileFetcher; use deno_core::error::AnyError; use deno_core::futures::StreamExt; +use deno_core::task::spawn_blocking; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; use rustyline::error::ReadlineError; @@ -32,7 +33,7 @@ async fn read_line_and_poll( editor: ReplEditor, ) -> Result { #![allow(clippy::await_holding_refcell_ref)] - let mut line_fut = tokio::task::spawn_blocking(move || editor.readline()); + let mut line_fut = spawn_blocking(move || editor.readline()); let mut poll_worker = true; let notifications_rc = repl_session.notifications.clone(); let mut notifications = notifications_rc.borrow_mut(); diff --git a/cli/tools/task.rs b/cli/tools/task.rs index bf972e2db8..37a1aa1c97 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -21,6 +21,7 @@ use indexmap::IndexMap; use std::collections::HashMap; use std::path::PathBuf; use std::rc::Rc; +use tokio::task::LocalSet; pub async fn execute_script( flags: Flags, @@ -59,9 +60,10 @@ pub async fn execute_script( let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{task_name}'."))?; let env_vars = collect_env_vars(); - let exit_code = - deno_task_shell::execute(seq_list, env_vars, &cwd, Default::default()) - .await; + let local = LocalSet::new(); 
+ let future = + deno_task_shell::execute(seq_list, env_vars, &cwd, Default::default()); + let exit_code = local.run_until(future).await; Ok(exit_code) } else if let Some(script) = package_json_scripts.get(task_name) { let package_json_deps_provider = factory.package_json_deps_provider(); @@ -109,8 +111,10 @@ pub async fn execute_script( .with_context(|| format!("Error parsing script '{task_name}'."))?; let npx_commands = resolve_npm_commands(npm_resolver, node_resolver)?; let env_vars = collect_env_vars(); - let exit_code = - deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands).await; + let local = LocalSet::new(); + let future = + deno_task_shell::execute(seq_list, env_vars, &cwd, npx_commands); + let exit_code = local.run_until(future).await; Ok(exit_code) } else { eprintln!("Task not found: {task_name}"); diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 50e220a466..f78e325394 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -34,6 +34,8 @@ use deno_core::futures::StreamExt; use deno_core::located_script_name; use deno_core::parking_lot::Mutex; use deno_core::serde_v8; +use deno_core::task::spawn; +use deno_core::task::spawn_blocking; use deno_core::url::Url; use deno_core::v8; use deno_core::ModuleSpecifier; @@ -42,7 +44,7 @@ use deno_runtime::deno_io::StdioPipe; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; -use deno_runtime::tokio_util::run_local; +use deno_runtime::tokio_util::create_and_run_current_thread; use indexmap::IndexMap; use indexmap::IndexSet; use log::Level; @@ -916,12 +918,12 @@ pub fn format_test_error(js_error: &JsError) -> String { /// Test a single specifier as documentation containing test programs, an executable test module or /// both. 
pub async fn test_specifier( - worker_factory: &CliMainWorkerFactory, + worker_factory: Arc, permissions: Permissions, specifier: ModuleSpecifier, mut sender: TestEventSender, fail_fast_tracker: FailFastTracker, - options: &TestSpecifierOptions, + options: TestSpecifierOptions, ) -> Result<(), AnyError> { if fail_fast_tracker.should_stop() { return Ok(()); @@ -1316,7 +1318,7 @@ async fn test_specifiers( let concurrent_jobs = options.concurrent_jobs; let sender_ = sender.downgrade(); - let sigint_handler_handle = tokio::task::spawn(async move { + let sigint_handler_handle = spawn(async move { signal::ctrl_c().await.unwrap(); sender_.upgrade().map(|s| s.send(TestEvent::Sigint).ok()); }); @@ -1328,14 +1330,14 @@ async fn test_specifiers( let sender = sender.clone(); let fail_fast_tracker = FailFastTracker::new(options.fail_fast); let specifier_options = options.specifier.clone(); - tokio::task::spawn_blocking(move || { - run_local(test_specifier( - &worker_factory, + spawn_blocking(move || { + create_and_run_current_thread(test_specifier( + worker_factory, permissions, specifier, sender.clone(), fail_fast_tracker, - &specifier_options, + specifier_options, )) }) }); @@ -1350,7 +1352,7 @@ async fn test_specifiers( )); let handler = { - tokio::task::spawn(async move { + spawn(async move { let earlier = Instant::now(); let mut tests = IndexMap::new(); let mut test_steps = IndexMap::new(); @@ -1887,7 +1889,7 @@ pub async fn run_tests_with_watch( // run, a process-scoped basic exit handler is required due to a tokio // limitation where it doesn't unbind its own handler for the entire process // once a user adds one. 
- tokio::task::spawn(async move { + spawn(async move { loop { signal::ctrl_c().await.unwrap(); if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) { @@ -2070,7 +2072,7 @@ fn start_output_redirect_thread( sender: UnboundedSender, flush_state: Arc>>>, ) { - tokio::task::spawn_blocking(move || loop { + spawn_blocking(move || loop { let mut buffer = [0; 512]; let size = match pipe_reader.read(&mut buffer) { Ok(0) | Err(_) => break, diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index b5aefe4798..cbd924755b 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -17,6 +17,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::future::BoxFuture; use deno_core::futures::FutureExt; +use deno_core::task::spawn; use deno_semver::Version; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -198,7 +199,7 @@ pub fn check_for_upgrades( if update_checker.should_check_for_new_version() { let env = update_checker.env.clone(); // do this asynchronously on a separate task - tokio::spawn(async move { + spawn(async move { // Sleep for a small amount of time to not unnecessarily impact startup // time. 
tokio::time::sleep(UPGRADE_CHECK_FETCH_DELAY).await; diff --git a/cli/util/draw_thread.rs b/cli/util/draw_thread.rs index 028b20d00e..2fd81a78ab 100644 --- a/cli/util/draw_thread.rs +++ b/cli/util/draw_thread.rs @@ -2,6 +2,7 @@ use console_static_text::ConsoleStaticText; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use deno_runtime::ops::tty::ConsoleSize; use once_cell::sync::Lazy; use std::sync::Arc; @@ -162,7 +163,7 @@ impl DrawThread { internal_state.has_draw_thread = true; let drawer_id = internal_state.drawer_id; - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut previous_size = console_size(); loop { let mut delay_ms = 120; diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 94ec24fe6b..658002e3b6 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -3,6 +3,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; pub use deno_core::normalize_path; +use deno_core::task::spawn_blocking; use deno_core::ModuleSpecifier; use deno_runtime::deno_crypto::rand; use deno_runtime::deno_node::PathClean; @@ -503,7 +504,7 @@ impl LaxSingleProcessFsFlag { // This uses a blocking task because we use a single threaded // runtime and this is time sensitive so we don't want it to update // at the whims of of whatever is occurring on the runtime thread. - tokio::task::spawn_blocking({ + spawn_blocking({ let token = token.clone(); let last_updated_path = last_updated_path.clone(); move || { diff --git a/core/Cargo.toml b/core/Cargo.toml index c0854fdb66..2abf5b10e9 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -36,6 +36,7 @@ serde_json = { workspace = true, features = ["preserve_order"] } serde_v8.workspace = true smallvec.workspace = true sourcemap = "6.1" +tokio.workspace = true url.workspace = true v8.workspace = true @@ -46,4 +47,3 @@ path = "examples/http_bench_json_ops/main.rs" # These dependencies are only used for the 'http_bench_*_ops' examples. 
[dev-dependencies] deno_ast.workspace = true -tokio.workspace = true diff --git a/core/lib.rs b/core/lib.rs index 1b2841a661..58140bb227 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -23,6 +23,7 @@ mod resources; mod runtime; pub mod snapshot_util; mod source_map; +pub mod task; mod task_queue; // Re-exports diff --git a/core/task.rs b/core/task.rs new file mode 100644 index 0000000000..46a4c8c261 --- /dev/null +++ b/core/task.rs @@ -0,0 +1,131 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. +use core::pin::Pin; +use core::task::Context; +use core::task::Poll; +use futures::Future; +use std::marker::PhantomData; +use tokio::runtime::Handle; +use tokio::runtime::RuntimeFlavor; + +/// Equivalent to [`tokio::task::JoinHandle`]. +#[repr(transparent)] +pub struct JoinHandle { + handle: tokio::task::JoinHandle>, + _r: PhantomData, +} + +impl JoinHandle { + /// Equivalent to [`tokio::task::JoinHandle::abort`]. + pub fn abort(&self) { + self.handle.abort() + } +} + +impl Future for JoinHandle { + type Output = Result; + + fn poll( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + // SAFETY: We are sure that handle is valid here + unsafe { + let me: &mut Self = Pin::into_inner_unchecked(self); + let handle = Pin::new_unchecked(&mut me.handle); + match handle.poll(cx) { + Poll::Pending => Poll::Pending, + Poll::Ready(Ok(r)) => Poll::Ready(Ok(r.into_inner())), + Poll::Ready(Err(e)) => Poll::Ready(Err(e)), + } + } + } +} + +/// Equivalent to [`tokio::task::spawn`], but does not require the future to be [`Send`]. Must only be +/// used on a [`RuntimeFlavor::CurrentThread`] executor, though this is only checked when running with +/// debug assertions. 
+pub fn spawn + 'static, R: 'static>( + f: F, +) -> JoinHandle { + debug_assert!( + Handle::current().runtime_flavor() == RuntimeFlavor::CurrentThread + ); + // SAFETY: we know this is a current-thread executor + let future = unsafe { MaskFutureAsSend::new(f) }; + JoinHandle { + handle: tokio::task::spawn(future), + _r: Default::default(), + } +} + +/// Equivalent to [`tokio::task::spawn_blocking`]. Currently a thin wrapper around the tokio API, but this +/// may change in the future. +pub fn spawn_blocking< + F: (FnOnce() -> R) + Send + 'static, + R: Send + 'static, +>( + f: F, +) -> JoinHandle { + let handle = tokio::task::spawn_blocking(|| MaskResultAsSend { result: f() }); + JoinHandle { + handle, + _r: Default::default(), + } +} + +#[repr(transparent)] +#[doc(hidden)] +pub struct MaskResultAsSend { + result: R, +} + +/// SAFETY: We ensure that Send bounds are only faked when tokio is running on a current-thread executor +unsafe impl Send for MaskResultAsSend {} + +impl MaskResultAsSend { + #[inline(always)] + pub fn into_inner(self) -> R { + self.result + } +} + +pub struct MaskFutureAsSend { + future: F, +} + +impl MaskFutureAsSend { + /// Mark a non-`Send` future as `Send`. This is a trick to be able to use + /// `tokio::spawn()` (which requires `Send` futures) in a current thread + /// runtime. + /// + /// # Safety + /// + /// You must ensure that the future is actually used on the same + /// thread, ie. always use current thread runtime flavor from Tokio. + pub unsafe fn new(future: F) -> Self { + Self { future } + } +} + +// SAFETY: we are cheating here - this struct is NOT really Send, +// but we need to mark it Send so that we can use `spawn()` in Tokio. 
+unsafe impl Send for MaskFutureAsSend {} + +impl Future for MaskFutureAsSend { + type Output = MaskResultAsSend; + + fn poll( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + // SAFETY: We are sure that future is valid here + unsafe { + let me: &mut MaskFutureAsSend = Pin::into_inner_unchecked(self); + let future = Pin::new_unchecked(&mut me.future); + match future.poll(cx) { + Poll::Pending => Poll::Pending, + Poll::Ready(result) => Poll::Ready(MaskResultAsSend { result }), + } + } + } +} diff --git a/core/task_queue.rs b/core/task_queue.rs index 36a169650c..adb25a4f62 100644 --- a/core/task_queue.rs +++ b/core/task_queue.rs @@ -127,7 +127,7 @@ mod tests { for i in 0..100 { let data = data.clone(); tasks.push(task_queue.queue(async move { - tokio::task::spawn_blocking(move || { + crate::task::spawn_blocking(move || { let mut data = data.lock(); if *data != i { panic!("Value was not equal."); diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index 2853f793db..4eb9924c7a 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -10,6 +10,7 @@ use std::time::UNIX_EPOCH; use async_trait::async_trait; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn_blocking; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -99,7 +100,7 @@ impl Cache for SqliteBackedCache { async fn storage_open(&self, cache_name: String) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); db.execute( "INSERT OR IGNORE INTO cache_storage (cache_name) VALUES (?1)", @@ -124,7 +125,7 @@ impl Cache for SqliteBackedCache { /// Note: this doesn't check the disk, it only checks the sqlite db. 
async fn storage_has(&self, cache_name: String) -> Result { let db = self.connection.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); let cache_exists = db.query_row( "SELECT count(id) FROM cache_storage WHERE cache_name = ?1", @@ -143,7 +144,7 @@ impl Cache for SqliteBackedCache { async fn storage_delete(&self, cache_name: String) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let db = db.lock(); let maybe_cache_id = db .query_row( @@ -210,7 +211,7 @@ impl Cache for SqliteBackedCache { > { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - let query_result = tokio::task::spawn_blocking(move || { + let query_result = spawn_blocking(move || { let db = db.lock(); let result = db.query_row( "SELECT response_body_key, response_headers, response_status, response_status_text, request_headers @@ -269,7 +270,7 @@ impl Cache for SqliteBackedCache { request: CacheDeleteRequest, ) -> Result { let db = self.connection.clone(); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { // TODO(@satyarohith): remove the response body from disk if one exists let db = db.lock(); let rows_effected = db.execute( @@ -287,7 +288,7 @@ async fn insert_cache_asset( put: CachePutRequest, response_body_key: Option, ) -> Result, deno_core::anyhow::Error> { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let maybe_response_body = { let db = db.lock(); db.query_row( diff --git a/ext/crypto/decrypt.rs b/ext/crypto/decrypt.rs index 6c4d5b6ba5..fc54fe8182 100644 --- a/ext/crypto/decrypt.rs +++ b/ext/crypto/decrypt.rs @@ -20,6 +20,7 @@ use deno_core::error::custom_error; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use rsa::pkcs1::DecodeRsaPrivateKey; use 
rsa::PaddingScheme; @@ -98,7 +99,7 @@ pub async fn op_crypto_decrypt( tag_length, } => decrypt_aes_gcm(key, length, tag_length, iv, additional_data, &data), }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/encrypt.rs b/ext/crypto/encrypt.rs index f34e0cbc6b..2831ca0f4a 100644 --- a/ext/crypto/encrypt.rs +++ b/ext/crypto/encrypt.rs @@ -19,6 +19,7 @@ use ctr::Ctr64BE; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use rand::rngs::OsRng; use rsa::pkcs1::DecodeRsaPublicKey; @@ -99,7 +100,7 @@ pub async fn op_crypto_encrypt( key_length, } => encrypt_aes_ctr(key, key_length, &counter, ctr_length, &data), }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/generate_key.rs b/ext/crypto/generate_key.rs index 2a9452c433..426c61376e 100644 --- a/ext/crypto/generate_key.rs +++ b/ext/crypto/generate_key.rs @@ -2,6 +2,7 @@ use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::ZeroCopyBuf; use elliptic_curve::rand_core::OsRng; use num_traits::FromPrimitive; @@ -56,7 +57,7 @@ pub async fn op_crypto_generate_key( generate_key_hmac(hash, length) } }; - let buf = tokio::task::spawn_blocking(fun).await.unwrap()?; + let buf = spawn_blocking(fun).await.unwrap()?; Ok(buf.into()) } diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index f481f97f6b..05349bf680 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -10,6 +10,7 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::OpState; use deno_core::ZeroCopyBuf; use serde::Deserialize; @@ -601,7 +602,7 @@ pub async fn op_crypto_subtle_digest( algorithm: CryptoHash, data: ZeroCopyBuf, ) -> 
Result { - let output = tokio::task::spawn_blocking(move || { + let output = spawn_blocking(move || { digest::digest(algorithm.into(), &data) .as_ref() .to_vec() diff --git a/ext/ffi/call.rs b/ext/ffi/call.rs index 98186936cf..21358d851e 100644 --- a/ext/ffi/call.rs +++ b/ext/ffi/call.rs @@ -15,6 +15,7 @@ use deno_core::op; use deno_core::serde_json::Value; use deno_core::serde_v8; use deno_core::serde_v8::ExternalPointer; +use deno_core::task::spawn_blocking; use deno_core::v8; use deno_core::OpState; use deno_core::ResourceId; @@ -298,7 +299,7 @@ where .map(|v| v8::Local::::try_from(v.v8_value).unwrap()); let out_buffer_ptr = out_buffer_as_ptr(scope, out_buffer); - let join_handle = tokio::task::spawn_blocking(move || { + let join_handle = spawn_blocking(move || { let PtrSymbol { cif, ptr } = symbol.clone(); ffi_call( call_args, @@ -345,7 +346,7 @@ pub fn op_ffi_call_nonblocking<'scope>( .map(|v| v8::Local::::try_from(v.v8_value).unwrap()); let out_buffer_ptr = out_buffer_as_ptr(scope, out_buffer); - let join_handle = tokio::task::spawn_blocking(move || { + let join_handle = spawn_blocking(move || { let Symbol { cif, ptr, diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 6ac935bbd2..9baf74a2a4 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -9,6 +9,7 @@ use std::path::Path; use std::path::PathBuf; use std::rc::Rc; +use deno_core::task::spawn_blocking; use deno_io::fs::File; use deno_io::fs::FsResult; use deno_io::fs::FsStat; @@ -86,8 +87,7 @@ impl FileSystem for RealFs { options: OpenOptions, ) -> FsResult> { let opts = open_options(options); - let std_file = - tokio::task::spawn_blocking(move || opts.open(path)).await??; + let std_file = spawn_blocking(move || opts.open(path)).await??; Ok(Rc::new(StdFileResourceInner::file(std_file))) } @@ -105,14 +105,14 @@ impl FileSystem for RealFs { recursive: bool, mode: u32, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || mkdir(&path, recursive, mode)).await? 
+ spawn_blocking(move || mkdir(&path, recursive, mode)).await? } fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> { chmod(path, mode) } async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { - tokio::task::spawn_blocking(move || chmod(&path, mode)).await? + spawn_blocking(move || chmod(&path, mode)).await? } fn chown_sync( @@ -129,53 +129,49 @@ impl FileSystem for RealFs { uid: Option, gid: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || chown(&path, uid, gid)).await? + spawn_blocking(move || chown(&path, uid, gid)).await? } fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> { remove(path, recursive) } async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { - tokio::task::spawn_blocking(move || remove(&path, recursive)).await? + spawn_blocking(move || remove(&path, recursive)).await? } fn copy_file_sync(&self, from: &Path, to: &Path) -> FsResult<()> { copy_file(from, to) } async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - tokio::task::spawn_blocking(move || copy_file(&from, &to)).await? + spawn_blocking(move || copy_file(&from, &to)).await? } fn stat_sync(&self, path: &Path) -> FsResult { stat(path).map(Into::into) } async fn stat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || stat(&path)) - .await? - .map(Into::into) + spawn_blocking(move || stat(&path)).await?.map(Into::into) } fn lstat_sync(&self, path: &Path) -> FsResult { lstat(path).map(Into::into) } async fn lstat_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || lstat(&path)) - .await? - .map(Into::into) + spawn_blocking(move || lstat(&path)).await?.map(Into::into) } fn realpath_sync(&self, path: &Path) -> FsResult { realpath(path) } async fn realpath_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || realpath(&path)).await? + spawn_blocking(move || realpath(&path)).await? 
} fn read_dir_sync(&self, path: &Path) -> FsResult> { read_dir(path) } async fn read_dir_async(&self, path: PathBuf) -> FsResult> { - tokio::task::spawn_blocking(move || read_dir(&path)).await? + spawn_blocking(move || read_dir(&path)).await? } fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { @@ -186,7 +182,7 @@ impl FileSystem for RealFs { oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || fs::rename(oldpath, newpath)) + spawn_blocking(move || fs::rename(oldpath, newpath)) .await? .map_err(Into::into) } @@ -199,7 +195,7 @@ impl FileSystem for RealFs { oldpath: PathBuf, newpath: PathBuf, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || fs::hard_link(oldpath, newpath)) + spawn_blocking(move || fs::hard_link(oldpath, newpath)) .await? .map_err(Into::into) } @@ -218,15 +214,14 @@ impl FileSystem for RealFs { newpath: PathBuf, file_type: Option, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || symlink(&oldpath, &newpath, file_type)) - .await? + spawn_blocking(move || symlink(&oldpath, &newpath, file_type)).await? } fn read_link_sync(&self, path: &Path) -> FsResult { fs::read_link(path).map_err(Into::into) } async fn read_link_async(&self, path: PathBuf) -> FsResult { - tokio::task::spawn_blocking(move || fs::read_link(path)) + spawn_blocking(move || fs::read_link(path)) .await? .map_err(Into::into) } @@ -235,7 +230,7 @@ impl FileSystem for RealFs { truncate(path, len) } async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { - tokio::task::spawn_blocking(move || truncate(&path, len)).await? + spawn_blocking(move || truncate(&path, len)).await? 
} fn utime_sync( @@ -260,7 +255,7 @@ impl FileSystem for RealFs { ) -> FsResult<()> { let atime = filetime::FileTime::from_unix_time(atime_secs, atime_nanos); let mtime = filetime::FileTime::from_unix_time(mtime_secs, mtime_nanos); - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { filetime::set_file_times(path, atime, mtime).map_err(Into::into) }) .await? @@ -289,7 +284,7 @@ impl FileSystem for RealFs { options: OpenOptions, data: Vec, ) -> FsResult<()> { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let opts = open_options(options); let mut file = opts.open(path)?; #[cfg(unix)] @@ -307,7 +302,7 @@ impl FileSystem for RealFs { fs::read(path).map_err(Into::into) } async fn read_file_async(&self, path: PathBuf) -> FsResult> { - tokio::task::spawn_blocking(move || fs::read(path)) + spawn_blocking(move || fs::read(path)) .await? .map_err(Into::into) } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 12db29b1b7..8b2f91be06 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -17,6 +17,8 @@ use cache_control::CacheControl; use deno_core::error::AnyError; use deno_core::futures::TryFutureExt; use deno_core::op; +use deno_core::task::spawn; +use deno_core::task::JoinHandle; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -68,9 +70,6 @@ use std::rc::Rc; use tokio::io::AsyncReadExt; use tokio::io::AsyncWriteExt; -use tokio::task::spawn_local; -use tokio::task::JoinHandle; - type Request = hyper1::Request; type Response = hyper1::Response; @@ -262,7 +261,7 @@ pub fn op_http_upgrade_raw( let (read_rx, write_tx) = tokio::io::split(read); let (mut write_rx, mut read_tx) = tokio::io::split(write); - spawn_local(async move { + spawn(async move { let mut upgrade_stream = WebSocketUpgrade::::default(); // Stage 2: Extract the Upgraded connection @@ -285,7 +284,7 @@ pub fn op_http_upgrade_raw( // Stage 3: Pump the data let (mut upgraded_rx, mut upgraded_tx) = 
tokio::io::split(upgraded); - spawn_local(async move { + spawn(async move { let mut buf = [0; 1024]; loop { let read = upgraded_rx.read(&mut buf).await?; @@ -296,7 +295,7 @@ pub fn op_http_upgrade_raw( } Ok::<_, AnyError>(()) }); - spawn_local(async move { + spawn(async move { let mut buf = [0; 1024]; loop { let read = write_rx.read(&mut buf).await?; @@ -792,11 +791,10 @@ fn serve_https( cancel: Rc, tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { - // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { new_slab_future(req, request_info.clone(), tx.clone()) }); - spawn_local( + spawn( async { io.handshake().await?; // If the client specifically negotiates a protocol, we will use it. If not, we'll auto-detect @@ -820,11 +818,10 @@ fn serve_http( cancel: Rc, tx: tokio::sync::mpsc::Sender, ) -> JoinHandle> { - // TODO(mmastrac): This is faster if we can use tokio::spawn but then the send bounds get us let svc = service_fn(move |req: Request| { new_slab_future(req, request_info.clone(), tx.clone()) }); - spawn_local(serve_http2_autodetect(io, svc).try_or_cancel(cancel)) + spawn(serve_http2_autodetect(io, svc).try_or_cancel(cancel)) } fn serve_http_on( @@ -916,7 +913,7 @@ where let cancel_clone = resource.cancel_handle(); let listen_properties_clone: HttpListenProperties = listen_properties.clone(); - let handle = spawn_local(async move { + let handle = spawn(async move { loop { let conn = listener .accept() diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 21d3dc6519..7a1a93f805 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -20,6 +20,7 @@ use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::futures::TryFutureExt; use deno_core::op; +use deno_core::task::spawn; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::BufView; @@ -68,7 +69,6 @@ use std::task::Poll; use tokio::io::AsyncRead; use tokio::io::AsyncWrite; use 
tokio::io::AsyncWriteExt; -use tokio::task::spawn_local; use crate::network_buffered_stream::NetworkBufferedStream; use crate::reader_stream::ExternallyAbortableReaderStream; @@ -184,7 +184,7 @@ impl HttpConnResource { }; let (task_fut, closed_fut) = task_fut.remote_handle(); let closed_fut = closed_fut.shared(); - spawn_local(task_fut); + spawn(task_fut); Self { addr, @@ -1005,7 +1005,7 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - spawn_local(fut); + deno_core::task::spawn(fut); } } @@ -1015,7 +1015,7 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - spawn_local(fut); + deno_core::task::spawn(fut); } } diff --git a/ext/io/lib.rs b/ext/io/lib.rs index 49e4ab714f..6dec7c3a7f 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -2,6 +2,7 @@ use deno_core::error::AnyError; use deno_core::op; +use deno_core::task::spawn_blocking; use deno_core::AsyncMutFuture; use deno_core::AsyncRefCell; use deno_core::AsyncResult; @@ -350,7 +351,7 @@ impl StdFileResourceInner { } } }; - let (cell_value, result) = tokio::task::spawn_blocking(move || { + let (cell_value, result) = spawn_blocking(move || { let result = action(&mut cell_value); (cell_value, result) }) @@ -372,7 +373,7 @@ impl StdFileResourceInner { // we want to restrict this to one async action at a time let _permit = self.cell_async_task_queue.acquire().await; - tokio::task::spawn_blocking(action).await.unwrap() + spawn_blocking(action).await.unwrap() } } diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index b9b37b3282..7f451d0a84 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -26,6 +26,7 @@ use deno_core::futures::task::Waker; use deno_core::op; use deno_core::parking_lot::Mutex; +use deno_core::task::spawn; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -74,7 +75,6 @@ use tokio::io::AsyncWriteExt; use tokio::io::ReadBuf; use tokio::net::TcpListener; use tokio::net::TcpStream; -use tokio::task::spawn_local; #[derive(Copy, Clone, 
Debug, Eq, PartialEq)] enum Flow { @@ -224,9 +224,9 @@ impl Drop for TlsStream { let use_linger_task = inner.poll_close(&mut cx).is_pending(); if use_linger_task { - spawn_local(poll_fn(move |cx| inner.poll_close(cx))); + spawn(poll_fn(move |cx| inner.poll_close(cx))); } else if cfg!(debug_assertions) { - spawn_local(async {}); // Spawn dummy task to detect missing LocalSet. + spawn(async {}); // Spawn dummy task to detect missing runtime. } } } diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index 9e1a3da989..0f8feb2a92 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -4,6 +4,7 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::op; use deno_core::serde_v8; +use deno_core::task::spawn_blocking; use deno_core::OpState; use deno_core::ResourceId; use deno_core::StringOrBuffer; @@ -57,12 +58,7 @@ pub async fn op_node_check_prime_async( checks: usize, ) -> Result { // TODO(@littledivy): use rayon for CPU-bound tasks - Ok( - tokio::task::spawn_blocking(move || { - primes::is_probably_prime(&num, checks) - }) - .await?, - ) + Ok(spawn_blocking(move || primes::is_probably_prime(&num, checks)).await?) 
} #[op] @@ -74,10 +70,8 @@ pub fn op_node_check_prime_bytes_async( // TODO(@littledivy): use rayon for CPU-bound tasks Ok(async move { Ok( - tokio::task::spawn_blocking(move || { - primes::is_probably_prime(&candidate, checks) - }) - .await?, + spawn_blocking(move || primes::is_probably_prime(&candidate, checks)) + .await?, ) }) } @@ -462,7 +456,7 @@ pub async fn op_node_pbkdf2_async( digest: String, keylen: usize, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut derived_key = vec![0; keylen]; pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key) .map(|_| derived_key.into()) @@ -477,7 +471,7 @@ pub fn op_node_generate_secret(buf: &mut [u8]) { #[op] pub async fn op_node_generate_secret_async(len: i32) -> ZeroCopyBuf { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut buf = vec![0u8; len as usize]; rand::thread_rng().fill(&mut buf[..]); buf.into() @@ -535,7 +529,7 @@ pub async fn op_node_hkdf_async( info: ZeroCopyBuf, okm_len: usize, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut okm = vec![0u8; okm_len]; hkdf_sync(&hash, &ikm, &salt, &info, &mut okm)?; Ok(okm.into()) @@ -578,10 +572,7 @@ pub async fn op_node_generate_rsa_async( modulus_length: usize, public_exponent: usize, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - generate_rsa(modulus_length, public_exponent) - }) - .await? + spawn_blocking(move || generate_rsa(modulus_length, public_exponent)).await? } fn dsa_generate( @@ -635,10 +626,7 @@ pub async fn op_node_dsa_generate_async( modulus_length: usize, divisor_length: usize, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - dsa_generate(modulus_length, divisor_length) - }) - .await? + spawn_blocking(move || dsa_generate(modulus_length, divisor_length)).await? 
} fn ec_generate( @@ -677,7 +665,7 @@ pub fn op_node_ec_generate( pub async fn op_node_ec_generate_async( named_curve: String, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || ec_generate(&named_curve)).await? + spawn_blocking(move || ec_generate(&named_curve)).await? } fn ed25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { @@ -704,7 +692,7 @@ pub fn op_node_ed25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError #[op] pub async fn op_node_ed25519_generate_async( ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(ed25519_generate).await? + spawn_blocking(ed25519_generate).await? } fn x25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { @@ -739,7 +727,7 @@ pub fn op_node_x25519_generate() -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> #[op] pub async fn op_node_x25519_generate_async( ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(x25519_generate).await? + spawn_blocking(x25519_generate).await? } fn dh_generate_group( @@ -772,7 +760,7 @@ pub fn op_node_dh_generate_group( pub async fn op_node_dh_generate_group_async( group_name: String, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || dh_generate_group(&group_name)).await? + spawn_blocking(move || dh_generate_group(&group_name)).await? } fn dh_generate( @@ -806,10 +794,8 @@ pub async fn op_node_dh_generate_async( prime_len: usize, generator: usize, ) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { - tokio::task::spawn_blocking(move || { - dh_generate(prime.as_deref(), prime_len, generator) - }) - .await? + spawn_blocking(move || dh_generate(prime.as_deref(), prime_len, generator)) + .await? 
} #[op] @@ -885,7 +871,7 @@ pub async fn op_node_scrypt_async( parallelization: u32, maxmem: u32, ) -> Result { - tokio::task::spawn_blocking(move || { + spawn_blocking(move || { let mut output_buffer = vec![0u8; keylen as usize]; let res = scrypt( password, @@ -1081,5 +1067,5 @@ pub fn op_node_gen_prime(size: usize) -> ZeroCopyBuf { pub async fn op_node_gen_prime_async( size: usize, ) -> Result { - Ok(tokio::task::spawn_blocking(move || gen_prime(size)).await?) + Ok(spawn_blocking(move || gen_prime(size)).await?) } diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index e03a13789f..a002b774ce 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -591,6 +591,6 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - tokio::task::spawn_local(fut); + deno_core::task::spawn(fut); } } diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs index 25d0d796c1..1a67068964 100644 --- a/runtime/inspector_server.rs +++ b/runtime/inspector_server.rs @@ -15,6 +15,7 @@ use deno_core::futures::task::Poll; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::task::spawn; use deno_core::InspectorMsg; use deno_core::InspectorSessionProxy; use deno_core::JsRuntime; @@ -109,7 +110,7 @@ where Fut::Output: 'static, { fn execute(&self, fut: Fut) { - tokio::task::spawn_local(fut); + deno_core::task::spawn(fut); } } @@ -160,7 +161,7 @@ fn handle_ws_request( // spawn a task that will wait for websocket connection and then pump messages between // the socket and inspector proxy - tokio::task::spawn_local(async move { + spawn(async move { let websocket = if let Ok(w) = fut.await { w } else { diff --git a/runtime/tokio_util.rs b/runtime/tokio_util.rs index a4db5e33fa..ce6ef305f0 100644 --- a/runtime/tokio_util.rs +++ b/runtime/tokio_util.rs @@ -1,5 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+use deno_core::task::MaskFutureAsSend; + pub fn create_basic_runtime() -> tokio::runtime::Runtime { tokio::runtime::Builder::new_current_thread() .enable_io() @@ -14,11 +16,14 @@ pub fn create_basic_runtime() -> tokio::runtime::Runtime { .unwrap() } -pub fn run_local(future: F) -> R +pub fn create_and_run_current_thread(future: F) -> R where - F: std::future::Future, + F: std::future::Future + 'static, + R: Send + 'static, { let rt = create_basic_runtime(); - let local = tokio::task::LocalSet::new(); - local.block_on(&rt, future) + // SAFETY: this this is guaranteed to be running on a current-thread executor + let future = unsafe { MaskFutureAsSend::new(future) }; + let join_handle = rt.spawn(future); + rt.block_on(join_handle).unwrap().into_inner() } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index d8c881ab7c..01262abcf2 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -3,7 +3,7 @@ use crate::colors; use crate::inspector_server::InspectorServer; use crate::ops; use crate::permissions::PermissionsContainer; -use crate::tokio_util::run_local; +use crate::tokio_util::create_and_run_current_thread; use crate::worker::FormatJsErrorFn; use crate::BootstrapOptions; use deno_broadcast_channel::InMemoryBroadcastChannel; @@ -838,5 +838,5 @@ pub fn run_web_worker( debug!("Worker thread shuts down {}", &name); result }; - run_local(fut) + create_and_run_current_thread(fut) } diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 7cff6b6eb4..4fc0067e70 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -3729,7 +3729,9 @@ ], "request-referrer.any.html": false, "request-referrer.any.worker.html": false, - "response-null-body.any.html": true + "response-null-body.any.html": { + "ignore": true + } }, "response": { "json.any.html": true, From 0ccfccdcd271cb75f7453c70c6c3f4cadd7f3858 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Mon, 15 May 2023 21:22:53 +0900 Subject: [PATCH 172/320] chore: check 
node_compat config diff in CI (#19119) --- .dprint.json | 2 +- .github/workflows/ci.generate.ts | 10 +++ .github/workflows/ci.yml | 6 ++ .gitmodules | 3 + tools/node_compat/TODO.md | 3 +- tools/node_compat/node | 1 + tools/node_compat/setup.ts | 117 ++++++------------------------- 7 files changed, 45 insertions(+), 97 deletions(-) create mode 160000 tools/node_compat/node diff --git a/.dprint.json b/.dprint.json index e1caea7fe3..31f945acda 100644 --- a/.dprint.json +++ b/.dprint.json @@ -48,7 +48,7 @@ "test_util/wpt", "third_party", "tools/node_compat/TODO.md", - "tools/node_compat/versions", + "tools/node_compat/node", "tools/wpt/expectation.json", "tools/wpt/manifest.json", "ext/websocket/autobahn/reports" diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 50c7b54b7b..84ff65b4c6 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -362,6 +362,10 @@ const ci = { ...submoduleStep("./test_util/wpt"), if: "matrix.wpt", }, + { + ...submoduleStep("./tools/node_compat/node"), + if: "matrix.job == 'lint'", + }, { name: "Create source tarballs (release, linux)", if: [ @@ -541,6 +545,12 @@ const ci = { run: "deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js", }, + { + name: "node_compat/setup.ts --check", + if: "matrix.job == 'lint'", + run: + "deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check", + }, { name: "Build debug", if: "matrix.job == 'test' && matrix.profile == 'debug'", diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c266f8f40a..9679ffbb63 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -120,6 +120,9 @@ jobs: - name: Clone submodule ./test_util/wpt run: git submodule update --init --recursive --depth=1 -- ./test_util/wpt if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt)' + - name: Clone submodule ./tools/node_compat/node + run: git submodule update --init 
--recursive --depth=1 -- ./tools/node_compat/node + if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')' - name: 'Create source tarballs (release, linux)' if: |- !(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'ubuntu') && @@ -328,6 +331,9 @@ jobs: - name: lint.js if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')' run: deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js + - name: node_compat/setup.ts --check + if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')' + run: deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check - name: Build debug if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''test'' && matrix.profile == ''debug'')' run: cargo build --locked --all-targets diff --git a/.gitmodules b/.gitmodules index 9e4f12afa6..a81f84fc6f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -10,3 +10,6 @@ path = test_util/wpt url = https://github.com/web-platform-tests/wpt.git +[submodule "tools/node_compat/node"] + path = tools/node_compat/node + url = https://github.com/denoland/node_test.git diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index 650cd4b165..2ea6983f91 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. 
-Total: 2934 +Total: 2935 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -1815,6 +1815,7 @@ Total: 2934 - [parallel/test-process-env.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-env.js) - [parallel/test-process-euid-egid.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-euid-egid.js) - [parallel/test-process-exception-capture-errors.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-errors.js) +- [parallel/test-process-exception-capture-should-abort-on-uncaught-setflagsfromstring.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-should-abort-on-uncaught-setflagsfromstring.js) - [parallel/test-process-exception-capture-should-abort-on-uncaught.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture-should-abort-on-uncaught.js) - [parallel/test-process-exception-capture.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exception-capture.js) - [parallel/test-process-exec-argv.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-process-exec-argv.js) diff --git a/tools/node_compat/node b/tools/node_compat/node new file mode 160000 index 0000000000..d0d9c1ba9d --- /dev/null +++ b/tools/node_compat/node @@ -0,0 +1 @@ +Subproject commit d0d9c1ba9d3facf1086438e21d6d329c599e5a3b diff --git a/tools/node_compat/setup.ts b/tools/node_compat/setup.ts index c8fd6a8e09..5bd5a5ba56 100755 --- a/tools/node_compat/setup.ts +++ b/tools/node_compat/setup.ts @@ -1,23 +1,11 @@ -#!/usr/bin/env -S deno run --allow-read=. --allow-write=. --allow-net=nodejs.org +#!/usr/bin/env -S deno run --allow-read=. --allow-write=. --allow-run=git // Copyright 2018-2023 the Deno authors. 
All rights reserved. MIT license. -/** This script downloads Node.js source tarball, extracts it and copies the - * test files according to the config file `cli/tests/node_compat/config.json` - */ +/** This copies the test files according to the config file `cli/tests/node_compat/config.jsonc` */ -import { Foras, gunzip } from "https://deno.land/x/denoflate@2.0.2/deno/mod.ts"; -import { Untar } from "../../test_util/std/archive/untar.ts"; import { walk } from "../../test_util/std/fs/walk.ts"; -import { - dirname, - fromFileUrl, - join, - sep, -} from "../../test_util/std/path/mod.ts"; +import { sep } from "../../test_util/std/path/mod.ts"; import { ensureFile } from "../../test_util/std/fs/ensure_file.ts"; -import { Buffer } from "../../test_util/std/io/buffer.ts"; -import { copy } from "../../test_util/std/streams/copy.ts"; -import { readAll } from "../../test_util/std/streams/read_all.ts"; import { writeAll } from "../../test_util/std/streams/write_all.ts"; import { withoutAll } from "../../test_util/std/collections/without_all.ts"; import { relative } from "../../test_util/std/path/posix.ts"; @@ -27,8 +15,6 @@ import { config, ignoreList } from "../../cli/tests/node_compat/common.ts"; const encoder = new TextEncoder(); const NODE_VERSION = config.nodeVersion; -const NODE_NAME = "node-v" + NODE_VERSION; -const NODE_ARCHIVE_NAME = `${NODE_NAME}.tar.gz`; const NODE_IGNORED_TEST_DIRS = [ "addons", @@ -51,25 +37,17 @@ const NODE_IGNORED_TEST_DIRS = [ "wpt", ]; -const NODE_TARBALL_URL = - `https://nodejs.org/dist/v${NODE_VERSION}/${NODE_ARCHIVE_NAME}`; -const NODE_VERSIONS_ROOT = new URL("versions/", import.meta.url); -const NODE_TARBALL_LOCAL_URL = new URL(NODE_ARCHIVE_NAME, NODE_VERSIONS_ROOT); -// local dir url where we copy the node tests -const NODE_LOCAL_ROOT_URL = new URL(NODE_NAME, NODE_VERSIONS_ROOT); -const NODE_LOCAL_TEST_URL = new URL(NODE_NAME + "/test/", NODE_VERSIONS_ROOT); +const VENDORED_NODE_TEST = new URL("node/test/", import.meta.url); const 
NODE_COMPAT_TEST_DEST_URL = new URL( "../../cli/tests/node_compat/test/", import.meta.url, ); -Foras.initSyncBundledOnce(); - async function getNodeTests(): Promise { const paths: string[] = []; - const rootPath = NODE_LOCAL_TEST_URL.href.slice(7); + const rootPath = VENDORED_NODE_TEST.href.slice(7); for await ( - const item of walk(NODE_LOCAL_TEST_URL, { exts: [".js"] }) + const item of walk(VENDORED_NODE_TEST, { exts: [".js"] }) ) { const path = relative(rootPath, item.path); if (NODE_IGNORED_TEST_DIRS.every((dir) => !path.startsWith(dir))) { @@ -125,33 +103,6 @@ async function clearTests() { } } -async function decompressTests() { - console.log(`Decompressing ${NODE_ARCHIVE_NAME}...`); - - const compressedFile = await Deno.open(NODE_TARBALL_LOCAL_URL); - - const buffer = new Buffer(gunzip(await readAll(compressedFile))); - compressedFile.close(); - - const tar = new Untar(buffer); - const outFolder = dirname(fromFileUrl(NODE_TARBALL_LOCAL_URL)); - const testsFolder = `${NODE_NAME}/test`; - - for await (const entry of tar) { - if (entry.type !== "file") continue; - if (!entry.fileName.startsWith(testsFolder)) continue; - const path = join(outFolder, entry.fileName); - await ensureFile(path); - const file = await Deno.open(path, { - create: true, - truncate: true, - write: true, - }); - await copy(entry, file); - file.close(); - } -} - /** Checks if file has entry in config.json */ function hasEntry(file: string, suite: string) { return Array.isArray(config.tests[suite]) && @@ -161,12 +112,12 @@ function hasEntry(file: string, suite: string) { async function copyTests() { console.log("Copying test files..."); - for await (const entry of walk(NODE_LOCAL_TEST_URL, { skip: ignoreList })) { + for await (const entry of walk(VENDORED_NODE_TEST, { skip: ignoreList })) { const fragments = entry.path.split(sep); // suite is the directory name after test/. 
For example, if the file is - // "node-v18.12.1/test/fixtures/policy/main.mjs" + // "node_comapt/node/test/fixtures/policy/main.mjs" // then suite is "fixtures/policy" - const suite = fragments.slice(fragments.indexOf(NODE_NAME) + 2, -1) + const suite = fragments.slice(fragments.indexOf("node_compat") + 3, -1) .join("/"); if (!hasEntry(entry.name, suite)) { continue; @@ -180,8 +131,9 @@ async function copyTests() { write: true, }); const srcFile = await Deno.open( - new URL(`${suite}/${entry.name}`, NODE_LOCAL_TEST_URL), + new URL(`${suite}/${entry.name}`, VENDORED_NODE_TEST), ); + // Add header to js files if (dest.pathname.endsWith("js")) { await writeAll( destFile, @@ -199,44 +151,19 @@ async function copyTests() { } } -/** Downloads Node tarball */ -async function downloadFile() { - console.log( - `Downloading ${NODE_TARBALL_URL} in "${NODE_TARBALL_LOCAL_URL}" ...`, - ); - const response = await fetch(NODE_TARBALL_URL); - if (!response.ok) { - throw new Error(`Request failed with status ${response.status}`); - } - await ensureFile(NODE_TARBALL_LOCAL_URL); - const file = await Deno.open(NODE_TARBALL_LOCAL_URL, { - truncate: true, - write: true, - create: true, - }); - await response.body.pipeTo(file.writable); -} - // main -try { - Deno.lstatSync(NODE_TARBALL_LOCAL_URL); -} catch (e) { - if (!(e instanceof Deno.errors.NotFound)) { - throw e; - } - await downloadFile(); -} - -try { - Deno.lstatSync(NODE_LOCAL_ROOT_URL); -} catch (e) { - if (!(e instanceof Deno.errors.NotFound)) { - throw e; - } - await decompressTests(); -} - await clearTests(); await copyTests(); await updateToDo(); + +if (Deno.args[0] === "--check") { + const cmd = new Deno.Command("git", { args: ["status", "-s"] }); + const { stdout } = await cmd.output(); + + if (stdout.length > 0) { + console.log("The following files have been changed:"); + console.log(new TextDecoder().decode(stdout)); + Deno.exit(1); + } +} From bfe93c6e814b5ba27e9bd356359910ff3c3f49bd Mon Sep 17 00:00:00 2001 From: Luca 
Casonato Date: Mon, 15 May 2023 16:55:47 +0200 Subject: [PATCH 173/320] refactor(ext/http): generic abstract listeners (#19132) Improve abstractions around listeners to support listener + connection network stream combinations not previously possible (for example a listener exposed as a Tcp, creating Unix network streams). --- Cargo.lock | 1 + ext/http/Cargo.toml | 1 + ext/http/http_next.rs | 42 +++------- ext/http/request_properties.rs | 144 +++++++++++++++++++++++---------- ext/net/raw.rs | 2 +- 5 files changed, 118 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4b46e4b653..6dcecdd031 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1026,6 +1026,7 @@ name = "deno_http" version = "0.99.0" dependencies = [ "async-compression", + "async-trait", "base64 0.13.1", "bencher", "brotli", diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 8bf1d42e2b..e555d742e5 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -22,6 +22,7 @@ harness = false [dependencies] async-compression = { version = "0.3.12", features = ["tokio", "brotli", "gzip"] } +async-trait.workspace = true base64.workspace = true brotli = "3.3.4" bytes.workspace = true diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 8b2f91be06..eaa19a89d0 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -33,7 +33,6 @@ use deno_core::ZeroCopyBuf; use deno_net::ops_tls::TlsStream; use deno_net::raw::put_network_stream_resource; use deno_net::raw::NetworkStream; -use deno_net::raw::NetworkStreamAddress; use fly_accept_encoding::Encoding; use http::header::ACCEPT_ENCODING; use http::header::CACHE_CONTROL; @@ -61,9 +60,6 @@ use std::borrow::Cow; use std::cell::RefCell; use std::future::Future; use std::io; -use std::net::Ipv4Addr; -use std::net::SocketAddr; -use std::net::SocketAddrV4; use std::pin::Pin; use std::rc::Rc; @@ -825,7 +821,7 @@ fn serve_http( } fn serve_http_on( - network_stream: NetworkStream, + connection: HTTP::Connection, listen_properties: 
&HttpListenProperties, cancel: Rc, tx: tokio::sync::mpsc::Sender, @@ -833,15 +829,10 @@ fn serve_http_on( where HTTP: HttpPropertyExtractor, { - // We always want some sort of peer address. If we can't get one, just make up one. - let peer_address = network_stream.peer_address().unwrap_or_else(|_| { - NetworkStreamAddress::Ip(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::new(0, 0, 0, 0), - 0, - ))) - }); let connection_properties: HttpConnectionProperties = - HTTP::connection_properties(listen_properties, &peer_address); + HTTP::connection_properties(listen_properties, &connection); + + let network_stream = HTTP::to_network_stream_from_connection(connection); match network_stream { NetworkStream::Tcp(conn) => { @@ -895,14 +886,10 @@ pub fn op_http_serve( where HTTP: HttpPropertyExtractor, { - let listener = HTTP::get_network_stream_listener_for_rid( - &mut state.borrow_mut(), - listener_rid, - )?; + let listener = + HTTP::get_listener_for_rid(&mut state.borrow_mut(), listener_rid)?; - let local_address = listener.listen_address()?; - let listen_properties = - HTTP::listen_properties(listener.stream(), &local_address); + let listen_properties = HTTP::listen_properties_from_listener(&listener)?; let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( @@ -915,8 +902,7 @@ where let listen_properties_clone: HttpListenProperties = listen_properties.clone(); let handle = spawn(async move { loop { - let conn = listener - .accept() + let conn = HTTP::accept_connection_from_listener(&listener) .try_or_cancel(cancel_clone.clone()) .await?; serve_http_on::( @@ -945,17 +931,15 @@ where #[op(v8)] pub fn op_http_serve_on( state: Rc>, - conn: ResourceId, + connection_rid: ResourceId, ) -> Result<(ResourceId, &'static str, String), AnyError> where HTTP: HttpPropertyExtractor, { - let network_stream: NetworkStream = - HTTP::get_network_stream_for_rid(&mut state.borrow_mut(), conn)?; + let connection = + HTTP::get_connection_for_rid(&mut 
state.borrow_mut(), connection_rid)?; - let local_address = network_stream.local_address()?; - let listen_properties = - HTTP::listen_properties(network_stream.stream(), &local_address); + let listen_properties = HTTP::listen_properties_from_connection(&connection)?; let (tx, rx) = tokio::sync::mpsc::channel(10); let resource: Rc = Rc::new(HttpJoinHandle( @@ -966,7 +950,7 @@ where let handle: JoinHandle> = serve_http_on::( - network_stream, + connection, &listen_properties, resource.cancel_handle(), tx, diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs index 9c0c0e8152..905139673e 100644 --- a/ext/http/request_properties.rs +++ b/ext/http/request_properties.rs @@ -1,10 +1,10 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use deno_core::error::AnyError; use deno_core::OpState; use deno_core::ResourceId; -use deno_net::raw::NetworkStream; -// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use deno_net::raw::take_network_stream_listener_resource; use deno_net::raw::take_network_stream_resource; +use deno_net::raw::NetworkStream; use deno_net::raw::NetworkStreamAddress; use deno_net::raw::NetworkStreamListener; use deno_net::raw::NetworkStreamType; @@ -12,23 +12,26 @@ use hyper::HeaderMap; use hyper::Uri; use hyper1::header::HOST; use std::borrow::Cow; +use std::net::Ipv4Addr; +use std::net::SocketAddr; +use std::net::SocketAddrV4; use std::rc::Rc; // TODO(mmastrac): I don't like that we have to clone this, but it's one-time setup #[derive(Clone)] pub struct HttpListenProperties { - pub stream_type: NetworkStreamType, pub scheme: &'static str, pub fallback_host: String, pub local_port: Option, + pub stream_type: NetworkStreamType, } #[derive(Clone)] pub struct HttpConnectionProperties { - pub stream_type: NetworkStreamType, pub peer_address: Rc, pub peer_port: Option, pub local_port: Option, + pub stream_type: NetworkStreamType, } pub struct HttpRequestProperties { @@ -37,31 +40,49 @@ 
pub struct HttpRequestProperties { /// Pluggable trait to determine listen, connection and request properties /// for embedders that wish to provide alternative routes for incoming HTTP. +#[async_trait::async_trait(?Send)] pub trait HttpPropertyExtractor { - /// Given a listener [`ResourceId`], returns the [`NetworkStreamListener`]. - fn get_network_stream_listener_for_rid( + type Listener: 'static; + type Connection; + + /// Given a listener [`ResourceId`], returns the [`HttpPropertyExtractor::Listener`]. + fn get_listener_for_rid( state: &mut OpState, listener_rid: ResourceId, - ) -> Result; + ) -> Result; - /// Given a connection [`ResourceId`], returns the [`NetworkStream`]. - fn get_network_stream_for_rid( + /// Given a connection [`ResourceId`], returns the [`HttpPropertyExtractor::Connection`]. + fn get_connection_for_rid( state: &mut OpState, - rid: ResourceId, - ) -> Result; + connection_rid: ResourceId, + ) -> Result; /// Determines the listener properties. - fn listen_properties( - stream_type: NetworkStreamType, - local_address: &NetworkStreamAddress, - ) -> HttpListenProperties; + fn listen_properties_from_listener( + listener: &Self::Listener, + ) -> Result; + + /// Determines the listener properties given a [`HttpPropertyExtractor::Connection`]. + fn listen_properties_from_connection( + connection: &Self::Connection, + ) -> Result; + + /// Accept a new [`HttpPropertyExtractor::Connection`] from the given listener [`HttpPropertyExtractor::Listener`]. + async fn accept_connection_from_listener( + listener: &Self::Listener, + ) -> Result; /// Determines the connection properties. fn connection_properties( listen_properties: &HttpListenProperties, - peer_address: &NetworkStreamAddress, + connection: &Self::Connection, ) -> HttpConnectionProperties; + /// Turn a given [`HttpPropertyExtractor::Connection`] into a [`NetworkStream`]. 
+ fn to_network_stream_from_connection( + connection: Self::Connection, + ) -> NetworkStream; + /// Determines the request properties. fn request_properties( connection_properties: &HttpConnectionProperties, @@ -72,15 +93,13 @@ pub trait HttpPropertyExtractor { pub struct DefaultHttpPropertyExtractor {} +#[async_trait::async_trait(?Send)] impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { - fn get_network_stream_for_rid( - state: &mut OpState, - rid: ResourceId, - ) -> Result { - take_network_stream_resource(&mut state.resource_table, rid) - } + type Listener = NetworkStreamListener; - fn get_network_stream_listener_for_rid( + type Connection = NetworkStream; + + fn get_listener_for_rid( state: &mut OpState, listener_rid: ResourceId, ) -> Result { @@ -90,30 +109,52 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { ) } - fn listen_properties( - stream_type: NetworkStreamType, - local_address: &NetworkStreamAddress, - ) -> HttpListenProperties { - let scheme = req_scheme_from_stream_type(stream_type); - let fallback_host = req_host_from_addr(stream_type, local_address); - let local_port: Option = match local_address { - NetworkStreamAddress::Ip(ip) => Some(ip.port()), - #[cfg(unix)] - NetworkStreamAddress::Unix(_) => None, - }; + fn get_connection_for_rid( + state: &mut OpState, + stream_rid: ResourceId, + ) -> Result { + take_network_stream_resource(&mut state.resource_table, stream_rid) + } - HttpListenProperties { - scheme, - fallback_host, - local_port, - stream_type, - } + async fn accept_connection_from_listener( + listener: &NetworkStreamListener, + ) -> Result { + listener.accept().await.map_err(Into::into) + } + + fn listen_properties_from_listener( + listener: &NetworkStreamListener, + ) -> Result { + let stream_type = listener.stream(); + let local_address = listener.listen_address()?; + listener_properties(stream_type, local_address) + } + + fn listen_properties_from_connection( + connection: &Self::Connection, + ) -> Result { + 
let stream_type = connection.stream(); + let local_address = connection.local_address()?; + listener_properties(stream_type, local_address) + } + + fn to_network_stream_from_connection( + connection: Self::Connection, + ) -> NetworkStream { + connection } fn connection_properties( listen_properties: &HttpListenProperties, - peer_address: &NetworkStreamAddress, + connection: &NetworkStream, ) -> HttpConnectionProperties { + // We always want some sort of peer address. If we can't get one, just make up one. + let peer_address = connection.peer_address().unwrap_or_else(|_| { + NetworkStreamAddress::Ip(SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(0, 0, 0, 0), + 0, + ))) + }); let peer_port: Option = match peer_address { NetworkStreamAddress::Ip(ip) => Some(ip.port()), #[cfg(unix)] @@ -128,10 +169,10 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { let stream_type = listen_properties.stream_type; HttpConnectionProperties { - stream_type, peer_address, peer_port, local_port, + stream_type, } } @@ -152,6 +193,25 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { } } +fn listener_properties( + stream_type: NetworkStreamType, + local_address: NetworkStreamAddress, +) -> Result { + let scheme = req_scheme_from_stream_type(stream_type); + let fallback_host = req_host_from_addr(stream_type, &local_address); + let local_port: Option = match local_address { + NetworkStreamAddress::Ip(ip) => Some(ip.port()), + #[cfg(unix)] + NetworkStreamAddress::Unix(_) => None, + }; + Ok(HttpListenProperties { + scheme, + fallback_host, + local_port, + stream_type, + }) +} + /// Compute the fallback address from the [`NetworkStreamListenAddress`]. If the request has no authority/host in /// its URI, and there is no [`HeaderName::HOST`] header, we fall back to this. 
fn req_host_from_addr( diff --git a/ext/net/raw.rs b/ext/net/raw.rs index 3b50af41e0..3f230a08ba 100644 --- a/ext/net/raw.rs +++ b/ext/net/raw.rs @@ -179,7 +179,7 @@ pub enum NetworkStreamAddress { impl NetworkStreamListener { /// Accepts a connection on this listener. - pub async fn accept(&self) -> Result { + pub async fn accept(&self) -> Result { Ok(match self { Self::Tcp(tcp) => { let (stream, _addr) = tcp.accept().await?; From 3356173d00486ffda99f3907de97489ac79c70dd Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Mon, 15 May 2023 19:41:53 +0200 Subject: [PATCH 174/320] feat(node/crypto): Diffie Hellman Support (#18943) Support crypto.DiffieHellman class in ext/node/crypto --- cli/tests/node_compat/config.jsonc | 2 + .../test/parallel/test-crypto-dh.js | 214 ++++++++++++++++++ ext/node/lib.rs | 2 + ext/node/ops/crypto/dh.rs | 2 +- ext/node/ops/crypto/mod.rs | 25 ++ .../internal/crypto/diffiehellman.ts | 151 ++++++++++-- tools/node_compat/TODO.md | 3 +- 7 files changed, 372 insertions(+), 27 deletions(-) create mode 100644 cli/tests/node_compat/test/parallel/test-crypto-dh.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 2146daf926..8631efcad1 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -45,6 +45,7 @@ "test-child-process-stdout-flush-exit.js", "test-child-process-stdout-flush.js", "test-console-instance.js", + "test-crypto-dh.js", "test-crypto-hkdf.js", "test-crypto-hmac.js", "test-crypto-prime.js", @@ -239,6 +240,7 @@ "test-console-sync-write-error.js", "test-console-table.js", "test-console-tty-colors.js", + "test-crypto-dh.js", "test-crypto-hkdf.js", "test-crypto-hmac.js", "test-crypto-prime.js", diff --git a/cli/tests/node_compat/test/parallel/test-crypto-dh.js b/cli/tests/node_compat/test/parallel/test-crypto-dh.js new file mode 100644 index 0000000000..b436207ac4 --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-crypto-dh.js @@ -0,0 +1,214 @@ +// 
deno-fmt-ignore-file +// deno-lint-ignore-file + +// Copyright Joyent and Node contributors. All rights reserved. MIT license. +// Taken from Node 18.12.1 +// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually + +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const crypto = require('crypto'); + +const size = common.hasFipsCrypto || common.hasOpenSSL3 ? 1024 : 256; +const dh1 = crypto.createDiffieHellman(size); +const p1 = dh1.getPrime('buffer'); +const dh2 = crypto.createDiffieHellman(p1, 'buffer'); +const key1 = dh1.generateKeys(); +const key2 = dh2.generateKeys('hex'); +const secret1 = dh1.computeSecret(key2, 'hex', 'base64'); +const secret2 = dh2.computeSecret(key1, 'latin1', 'buffer'); + +// Test Diffie-Hellman with two parties sharing a secret, +// using various encodings as we go along +assert.strictEqual(secret2.toString('base64'), secret1); +assert.strictEqual(dh1.verifyError, 0); +assert.strictEqual(dh2.verifyError, 0); + +// https://github.com/nodejs/node/issues/32738 +// XXX(bnoordhuis) validateInt32() throwing ERR_OUT_OF_RANGE and RangeError +// instead of ERR_INVALID_ARG_TYPE and TypeError is questionable, IMO. +assert.throws(() => crypto.createDiffieHellman(13.37), { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + message: 'The value of "sizeOrKey" is out of range. ' + + 'It must be an integer. Received 13.37', +}); + +assert.throws(() => crypto.createDiffieHellman('abcdef', 13.37), { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + message: 'The value of "generator" is out of range. ' + + 'It must be an integer. 
Received 13.37', +}); + +for (const bits of [-1, 0, 1]) { + if (common.hasOpenSSL3) { + assert.throws(() => crypto.createDiffieHellman(bits), { + code: 'ERR_OSSL_DH_MODULUS_TOO_SMALL', + name: 'Error', + message: /modulus too small/, + }); + } else { + assert.throws(() => crypto.createDiffieHellman(bits), { + code: 'ERR_OSSL_BN_BITS_TOO_SMALL', + name: 'Error', + message: /bits too small/, + }); + } +} + +// Through a fluke of history, g=0 defaults to DH_GENERATOR (2). +{ + const g = 0; + crypto.createDiffieHellman('abcdef', g); + crypto.createDiffieHellman('abcdef', 'hex', g); +} + +for (const g of [-1, 1]) { + const ex = { + code: 'ERR_OSSL_DH_BAD_GENERATOR', + name: 'Error', + message: /bad generator/, + }; + assert.throws(() => crypto.createDiffieHellman('abcdef', g), ex); + assert.throws(() => crypto.createDiffieHellman('abcdef', 'hex', g), ex); +} + +crypto.createDiffieHellman('abcdef', Buffer.from([2])); // OK + +for (const g of [Buffer.from([]), + Buffer.from([0]), + Buffer.from([1])]) { + const ex = { + code: 'ERR_OSSL_DH_BAD_GENERATOR', + name: 'Error', + message: /bad generator/, + }; + assert.throws(() => crypto.createDiffieHellman('abcdef', g), ex); + assert.throws(() => crypto.createDiffieHellman('abcdef', 'hex', g), ex); +} + +[ + [0x1, 0x2], + () => { }, + /abc/, + {}, +].forEach((input) => { + assert.throws( + () => crypto.createDiffieHellman(input), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + } + ); +}); + +// Create "another dh1" using generated keys from dh1, +// and compute secret again +const dh3 = crypto.createDiffieHellman(p1, 'buffer'); +const privkey1 = dh1.getPrivateKey(); +dh3.setPublicKey(key1); +dh3.setPrivateKey(privkey1); + +assert.deepStrictEqual(dh1.getPrime(), dh3.getPrime()); +assert.deepStrictEqual(dh1.getGenerator(), dh3.getGenerator()); +assert.deepStrictEqual(dh1.getPublicKey(), dh3.getPublicKey()); +assert.deepStrictEqual(dh1.getPrivateKey(), dh3.getPrivateKey()); +assert.strictEqual(dh3.verifyError, 0); + 
+const secret3 = dh3.computeSecret(key2, 'hex', 'base64'); + +assert.strictEqual(secret1, secret3); + +// computeSecret works without a public key set at all. +const dh4 = crypto.createDiffieHellman(p1, 'buffer'); +dh4.setPrivateKey(privkey1); + +assert.deepStrictEqual(dh1.getPrime(), dh4.getPrime()); +assert.deepStrictEqual(dh1.getGenerator(), dh4.getGenerator()); +assert.deepStrictEqual(dh1.getPrivateKey(), dh4.getPrivateKey()); +assert.strictEqual(dh4.verifyError, 0); + +const secret4 = dh4.computeSecret(key2, 'hex', 'base64'); + +assert.strictEqual(secret1, secret4); + + +if (false) { + let wrongBlockLength; + if (common.hasOpenSSL3) { + wrongBlockLength = { + message: 'error:1C80006B:Provider routines::wrong final block length', + code: 'ERR_OSSL_WRONG_FINAL_BLOCK_LENGTH', + library: 'Provider routines', + reason: 'wrong final block length' + }; + } else { + wrongBlockLength = { + message: 'error:0606506D:digital envelope' + + ' routines:EVP_DecryptFinal_ex:wrong final block length', + code: 'ERR_OSSL_EVP_WRONG_FINAL_BLOCK_LENGTH', + library: 'digital envelope routines', + reason: 'wrong final block length' + }; + } + + // Run this one twice to make sure that the dh3 clears its error properly + { + const c = crypto.createDecipheriv('aes-128-ecb', crypto.randomBytes(16), ''); + assert.throws(() => { + c.final('utf8'); + }, wrongBlockLength); + } + + { + const c = crypto.createDecipheriv('aes-128-ecb', crypto.randomBytes(16), ''); + assert.throws(() => { + c.final('utf8'); + }, wrongBlockLength); + } + + assert.throws(() => { + dh3.computeSecret(''); + }, { message: common.hasOpenSSL3 ? + 'error:02800080:Diffie-Hellman routines::invalid secret' : + 'Supplied key is too small' }); + + // Invalid test: curve argument is undefined + assert.throws( + () => crypto.createECDH(), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "curve" argument must be of type string. 
' + + 'Received undefined' + }); + + assert.throws( + function() { + crypto.getDiffieHellman('unknown-group'); + }, + { + name: 'Error', + code: 'ERR_CRYPTO_UNKNOWN_DH_GROUP', + message: 'Unknown DH group' + }, + 'crypto.getDiffieHellman(\'unknown-group\') ' + + 'failed to throw the expected error.' + ); +} + +assert.throws( + () => crypto.createDiffieHellman('', true), + { + code: 'ERR_INVALID_ARG_TYPE' + } +); +[true, Symbol(), {}, () => {}, []].forEach((generator) => assert.throws( + () => crypto.createDiffieHellman('', 'base64', generator), + { code: 'ERR_INVALID_ARG_TYPE' } +)); diff --git a/ext/node/lib.rs b/ext/node/lib.rs index e01954109a..aed325c93d 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -170,6 +170,8 @@ deno_core::extension!(deno_node, ops::crypto::op_node_dh_generate_group, ops::crypto::op_node_dh_generate_group_async, ops::crypto::op_node_dh_generate, + ops::crypto::op_node_dh_generate2, + ops::crypto::op_node_dh_compute_secret, ops::crypto::op_node_dh_generate_async, ops::crypto::op_node_verify, ops::crypto::op_node_random_int, diff --git a/ext/node/ops/crypto/dh.rs b/ext/node/ops/crypto/dh.rs index 4da9a01bf8..8b756d9a2d 100644 --- a/ext/node/ops/crypto/dh.rs +++ b/ext/node/ops/crypto/dh.rs @@ -63,7 +63,7 @@ impl DiffieHellman { } pub fn new(prime: Prime, generator: usize) -> Self { - let private_key = PrivateKey::new(32); + let private_key = PrivateKey::new(prime.bits()); let generator = BigUint::from_usize(generator).unwrap(); let public_key = private_key.compute_public_key(&generator, &prime); diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index 0f8feb2a92..05f2d34f7e 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -11,6 +11,7 @@ use deno_core::StringOrBuffer; use deno_core::ZeroCopyBuf; use hkdf::Hkdf; use num_bigint::BigInt; +use num_bigint_dig::BigUint; use num_traits::FromPrimitive; use rand::distributions::Distribution; use rand::distributions::Uniform; @@ -788,6 +789,30 @@ pub 
fn op_node_dh_generate( dh_generate(prime, prime_len, generator) } +// TODO(lev): This duplication should be avoided. +#[op] +pub fn op_node_dh_generate2( + prime: ZeroCopyBuf, + prime_len: usize, + generator: usize, +) -> Result<(ZeroCopyBuf, ZeroCopyBuf), AnyError> { + dh_generate(Some(prime).as_deref(), prime_len, generator) +} + +#[op] +pub fn op_node_dh_compute_secret( + prime: ZeroCopyBuf, + private_key: ZeroCopyBuf, + their_public_key: ZeroCopyBuf, +) -> Result { + let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref()); + let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref()); + let primei: BigUint = BigUint::from_bytes_be(prime.as_ref()); + let shared_secret: BigUint = pubkey.modpow(&privkey, &primei); + + Ok(shared_secret.to_bytes_be().into()) +} + #[op] pub async fn op_node_dh_generate_async( prime: Option, diff --git a/ext/node/polyfills/internal/crypto/diffiehellman.ts b/ext/node/polyfills/internal/crypto/diffiehellman.ts index 62a802126f..2531c07c7a 100644 --- a/ext/node/polyfills/internal/crypto/diffiehellman.ts +++ b/ext/node/polyfills/internal/crypto/diffiehellman.ts @@ -6,7 +6,10 @@ import { isAnyArrayBuffer, isArrayBufferView, } from "ext:deno_node/internal/util/types.ts"; -import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; +import { + ERR_INVALID_ARG_TYPE, + NodeError, +} from "ext:deno_node/internal/errors.ts"; import { validateInt32, validateString, @@ -32,9 +35,14 @@ const DH_GENERATOR = 2; export class DiffieHellman { verifyError!: number; + #prime: Buffer; + #primeLength: number; + #generator: Buffer; + #privateKey: Buffer; + #publicKey: Buffer; constructor( - sizeOrKey: unknown, + sizeOrKey: number | string | ArrayBufferView, keyEncoding?: unknown, generator?: unknown, genEncoding?: unknown, @@ -71,24 +79,68 @@ export class DiffieHellman { genEncoding = genEncoding || encoding; if (typeof sizeOrKey !== "number") { - sizeOrKey = toBuf(sizeOrKey as string, keyEncoding as string); + 
this.#prime = toBuf(sizeOrKey as string, keyEncoding as string); + } else { + // The supplied parameter is our primeLength, generate a suitable prime. + this.#primeLength = sizeOrKey as number; + if (this.#primeLength < 2) { + throw new NodeError("ERR_OSSL_BN_BITS_TOO_SMALL", "bits too small"); + } + + this.#prime = Buffer.from( + ops.op_node_gen_prime(this.#primeLength).buffer, + ); } if (!generator) { - generator = DH_GENERATOR; + // While the commonly used cyclic group generators for DH are 2 and 5, we + // need this a buffer, because, well.. Node. + this.#generator = Buffer.alloc(4); + this.#generator.writeUint32BE(DH_GENERATOR); } else if (typeof generator === "number") { validateInt32(generator, "generator"); + this.#generator = Buffer.alloc(4); + if (generator <= 0 || generator >= 0x7fffffff) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } + this.#generator.writeUint32BE(generator); } else if (typeof generator === "string") { generator = toBuf(generator, genEncoding as string); + this.#generator = generator; } else if (!isArrayBufferView(generator) && !isAnyArrayBuffer(generator)) { throw new ERR_INVALID_ARG_TYPE( "generator", ["number", "string", "ArrayBuffer", "Buffer", "TypedArray", "DataView"], generator, ); + } else { + this.#generator = Buffer.from(generator); } - notImplemented("crypto.DiffieHellman"); + this.#checkGenerator(); + + // TODO(lev): actually implement this value + this.verifyError = 0; + } + + #checkGenerator(): number { + let generator: number; + + if (this.#generator.length == 0) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } else if (this.#generator.length == 1) { + generator = this.#generator.readUint8(); + } else if (this.#generator.length == 2) { + generator = this.#generator.readUint16BE(); + } else { + generator = this.#generator.readUint32BE(); + } + + if (generator != 2 && generator != 5) { + throw new NodeError("ERR_OSSL_DH_BAD_GENERATOR", "bad generator"); + } + + return 
generator; } computeSecret(otherPublicKey: ArrayBufferView): Buffer; @@ -106,59 +158,110 @@ export class DiffieHellman { outputEncoding: BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, - _inputEncoding?: BinaryToTextEncoding, - _outputEncoding?: BinaryToTextEncoding, + otherPublicKey: ArrayBufferView | string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.computeSecret"); + let buf; + if (inputEncoding != undefined && inputEncoding != "buffer") { + buf = Buffer.from(otherPublicKey.buffer, inputEncoding); + } else { + buf = Buffer.from(otherPublicKey.buffer); + } + + const sharedSecret = ops.op_node_dh_compute_secret( + this.#prime, + this.#privateKey, + buf, + ); + + if (outputEncoding == undefined || outputEncoding == "buffer") { + return Buffer.from(sharedSecret.buffer); + } + + return Buffer.from(sharedSecret.buffer).toString(outputEncoding); } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding): string; generateKeys(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.generateKeys"); + const generator = this.#checkGenerator(); + const [privateKey, publicKey] = ops.op_node_dh_generate2( + this.#prime, + this.#primeLength, + generator, + ); + + this.#privateKey = Buffer.from(privateKey.buffer); + this.#publicKey = Buffer.from(publicKey.buffer); + + return this.#publicKey; } getGenerator(): Buffer; getGenerator(encoding: BinaryToTextEncoding): string; - getGenerator(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getGenerator"); + getGenerator(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#generator.toString(encoding); + } + + return this.#generator; } getPrime(): Buffer; getPrime(encoding: BinaryToTextEncoding): string; - 
getPrime(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrime"); + getPrime(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#prime.toString(encoding); + } + + return this.#prime; } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#privateKey.toString(encoding); + } + + return this.#privateKey; } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding): string; - getPublicKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPublicKey"); + getPublicKey(encoding?: BinaryToTextEncoding): Buffer | string { + if (encoding !== undefined && encoding != "buffer") { + return this.#publicKey.toString(encoding); + } + + return this.#publicKey; } setPrivateKey(privateKey: ArrayBufferView): void; setPrivateKey(privateKey: string, encoding: BufferEncoding): void; setPrivateKey( - _privateKey: ArrayBufferView | string, - _encoding?: BufferEncoding, + privateKey: ArrayBufferView | string, + encoding?: BufferEncoding, ) { - notImplemented("crypto.DiffieHellman.prototype.setPrivateKey"); + if (encoding == undefined || encoding == "buffer") { + this.#privateKey = Buffer.from(privateKey); + } else { + this.#privateKey = Buffer.from(privateKey, encoding); + } } setPublicKey(publicKey: ArrayBufferView): void; setPublicKey(publicKey: string, encoding: BufferEncoding): void; setPublicKey( - _publicKey: ArrayBufferView | string, - _encoding?: BufferEncoding, + publicKey: ArrayBufferView | string, + encoding?: BufferEncoding, ) { - notImplemented("crypto.DiffieHellman.prototype.setPublicKey"); + 
if (encoding == undefined || encoding == "buffer") { + this.#publicKey = Buffer.from(publicKey); + } else { + this.#publicKey = Buffer.from(publicKey, encoding); + } } } diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index 2ea6983f91..a94dbc090a 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. -Total: 2935 +Total: 2934 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -476,7 +476,6 @@ Total: 2935 - [parallel/test-crypto-dh-padding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-padding.js) - [parallel/test-crypto-dh-shared.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-shared.js) - [parallel/test-crypto-dh-stateless.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-stateless.js) -- [parallel/test-crypto-dh.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh.js) - [parallel/test-crypto-domain.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domain.js) - [parallel/test-crypto-domains.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domains.js) - [parallel/test-crypto-ecb.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-ecb.js) From 1171c549526ba9eaa070f7d02431748f12fddf9d Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Tue, 16 May 2023 00:07:58 +0200 Subject: [PATCH 175/320] feat(node/crypto): Builtin Diffie-Hellman Groups (#19137) Towards #18455 --- cli/tests/node_compat/config.jsonc | 1 + .../test/parallel/test-crypto-dh-shared.js | 22 + .../test/parallel/test-crypto-dh.js | 26 +- 
.../internal/crypto/diffiehellman.ts | 870 +++++++++++++++++- ext/node/polyfills/internal/errors.ts | 9 + tools/node_compat/TODO.md | 3 +- 6 files changed, 900 insertions(+), 31 deletions(-) create mode 100644 cli/tests/node_compat/test/parallel/test-crypto-dh-shared.js diff --git a/cli/tests/node_compat/config.jsonc b/cli/tests/node_compat/config.jsonc index 8631efcad1..36b22a6724 100644 --- a/cli/tests/node_compat/config.jsonc +++ b/cli/tests/node_compat/config.jsonc @@ -240,6 +240,7 @@ "test-console-sync-write-error.js", "test-console-table.js", "test-console-tty-colors.js", + "test-crypto-dh-shared.js", "test-crypto-dh.js", "test-crypto-hkdf.js", "test-crypto-hmac.js", diff --git a/cli/tests/node_compat/test/parallel/test-crypto-dh-shared.js b/cli/tests/node_compat/test/parallel/test-crypto-dh-shared.js new file mode 100644 index 0000000000..5ab6fe4656 --- /dev/null +++ b/cli/tests/node_compat/test/parallel/test-crypto-dh-shared.js @@ -0,0 +1,22 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +// Copyright Joyent and Node contributors. All rights reserved. MIT license. +// Taken from Node 18.12.1 +// This file is automatically generated by "node/_tools/setup.ts". 
Do not modify this file manually + +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const crypto = require('crypto'); + +const alice = crypto.createDiffieHellmanGroup('modp5'); +const bob = crypto.createDiffieHellmanGroup('modp5'); +alice.generateKeys(); +bob.generateKeys(); +const aSecret = alice.computeSecret(bob.getPublicKey()).toString('hex'); +const bSecret = bob.computeSecret(alice.getPublicKey()).toString('hex'); +assert.strictEqual(aSecret, bSecret); diff --git a/cli/tests/node_compat/test/parallel/test-crypto-dh.js b/cli/tests/node_compat/test/parallel/test-crypto-dh.js index b436207ac4..bcf0c67647 100644 --- a/cli/tests/node_compat/test/parallel/test-crypto-dh.js +++ b/cli/tests/node_compat/test/parallel/test-crypto-dh.js @@ -187,21 +187,21 @@ if (false) { message: 'The "curve" argument must be of type string. ' + 'Received undefined' }); - - assert.throws( - function() { - crypto.getDiffieHellman('unknown-group'); - }, - { - name: 'Error', - code: 'ERR_CRYPTO_UNKNOWN_DH_GROUP', - message: 'Unknown DH group' - }, - 'crypto.getDiffieHellman(\'unknown-group\') ' + - 'failed to throw the expected error.' - ); } +assert.throws( + function() { + crypto.getDiffieHellman('unknown-group'); + }, + { + name: 'Error', + code: 'ERR_CRYPTO_UNKNOWN_DH_GROUP', + message: 'Unknown DH group' + }, + 'crypto.getDiffieHellman(\'unknown-group\') ' + + 'failed to throw the expected error.' 
+); + assert.throws( () => crypto.createDiffieHellman('', true), { diff --git a/ext/node/polyfills/internal/crypto/diffiehellman.ts b/ext/node/polyfills/internal/crypto/diffiehellman.ts index 2531c07c7a..a5817d59a0 100644 --- a/ext/node/polyfills/internal/crypto/diffiehellman.ts +++ b/ext/node/polyfills/internal/crypto/diffiehellman.ts @@ -7,6 +7,7 @@ import { isArrayBufferView, } from "ext:deno_node/internal/util/types.ts"; import { + ERR_CRYPTO_UNKNOWN_DH_GROUP, ERR_INVALID_ARG_TYPE, NodeError, } from "ext:deno_node/internal/errors.ts"; @@ -265,11 +266,844 @@ export class DiffieHellman { } } +const DH_GROUP_NAMES = [ + "modp5", + "modp14", + "modp15", + "modp16", + "modp17", + "modp18", +]; +const DH_GROUPS = { + "modp5": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA237327, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp14": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 
0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AACAA68, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp15": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA93AD2CA, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp16": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 
0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34063199, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp17": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 
0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34028492, + 0x36C3FAB4, + 0xD27C7026, + 0xC1D4DCB2, + 0x602646DE, + 0xC9751E76, + 0x3DBA37BD, + 0xF8FF9406, + 0xAD9E530E, + 0xE5DB382F, + 0x413001AE, + 0xB06A53ED, + 0x9027D831, + 0x179727B0, + 0x865A8918, + 0xDA3EDBEB, + 0xCF9B14ED, + 0x44CE6CBA, + 0xCED4BB1B, + 0xDB7F1447, + 0xE6CC254B, + 0x33205151, + 0x2BD7AF42, + 0x6FB8F401, + 0x378CD2BF, + 0x5983CA01, + 0xC64B92EC, + 0xF032EA15, + 0xD1721D03, + 0xF482D7CE, + 0x6E74FEF6, + 0xD55E702F, + 0x46980C82, + 0xB5A84031, + 0x900B1C9E, + 0x59E7C97F, + 0xBEC7E8F3, + 0x23A97A7E, + 0x36CC88BE, + 0x0F1D45B7, + 0xFF585AC5, 
+ 0x4BD407B2, + 0x2B4154AA, + 0xCC8F6D7E, + 0xBF48E1D8, + 0x14CC5ED2, + 0x0F8037E0, + 0xA79715EE, + 0xF29BE328, + 0x06A1D58B, + 0xB7C5DA76, + 0xF550AA3D, + 0x8A1FBFF0, + 0xEB19CCB1, + 0xA313D55C, + 0xDA56C9EC, + 0x2EF29632, + 0x387FE8D7, + 0x6E3C0468, + 0x043E8F66, + 0x3F4860EE, + 0x12BF2D5B, + 0x0B7474D6, + 0xE694F91E, + 0x6DCC4024, + 0xFFFFFFFF, + 0xFFFFFFFF, + ], + generator: 2, + }, + "modp18": { + prime: [ + 0xFFFFFFFF, + 0xFFFFFFFF, + 0xC90FDAA2, + 0x2168C234, + 0xC4C6628B, + 0x80DC1CD1, + 0x29024E08, + 0x8A67CC74, + 0x020BBEA6, + 0x3B139B22, + 0x514A0879, + 0x8E3404DD, + 0xEF9519B3, + 0xCD3A431B, + 0x302B0A6D, + 0xF25F1437, + 0x4FE1356D, + 0x6D51C245, + 0xE485B576, + 0x625E7EC6, + 0xF44C42E9, + 0xA637ED6B, + 0x0BFF5CB6, + 0xF406B7ED, + 0xEE386BFB, + 0x5A899FA5, + 0xAE9F2411, + 0x7C4B1FE6, + 0x49286651, + 0xECE45B3D, + 0xC2007CB8, + 0xA163BF05, + 0x98DA4836, + 0x1C55D39A, + 0x69163FA8, + 0xFD24CF5F, + 0x83655D23, + 0xDCA3AD96, + 0x1C62F356, + 0x208552BB, + 0x9ED52907, + 0x7096966D, + 0x670C354E, + 0x4ABC9804, + 0xF1746C08, + 0xCA18217C, + 0x32905E46, + 0x2E36CE3B, + 0xE39E772C, + 0x180E8603, + 0x9B2783A2, + 0xEC07A28F, + 0xB5C55DF0, + 0x6F4C52C9, + 0xDE2BCBF6, + 0x95581718, + 0x3995497C, + 0xEA956AE5, + 0x15D22618, + 0x98FA0510, + 0x15728E5A, + 0x8AAAC42D, + 0xAD33170D, + 0x04507A33, + 0xA85521AB, + 0xDF1CBA64, + 0xECFB8504, + 0x58DBEF0A, + 0x8AEA7157, + 0x5D060C7D, + 0xB3970F85, + 0xA6E1E4C7, + 0xABF5AE8C, + 0xDB0933D7, + 0x1E8C94E0, + 0x4A25619D, + 0xCEE3D226, + 0x1AD2EE6B, + 0xF12FFA06, + 0xD98A0864, + 0xD8760273, + 0x3EC86A64, + 0x521F2B18, + 0x177B200C, + 0xBBE11757, + 0x7A615D6C, + 0x770988C0, + 0xBAD946E2, + 0x08E24FA0, + 0x74E5AB31, + 0x43DB5BFC, + 0xE0FD108E, + 0x4B82D120, + 0xA9210801, + 0x1A723C12, + 0xA787E6D7, + 0x88719A10, + 0xBDBA5B26, + 0x99C32718, + 0x6AF4E23C, + 0x1A946834, + 0xB6150BDA, + 0x2583E9CA, + 0x2AD44CE8, + 0xDBBBC2DB, + 0x04DE8EF9, + 0x2E8EFC14, + 0x1FBECAA6, + 0x287C5947, + 0x4E6BC05D, + 0x99B2964F, + 0xA090C3A2, + 0x233BA186, + 
0x515BE7ED, + 0x1F612970, + 0xCEE2D7AF, + 0xB81BDD76, + 0x2170481C, + 0xD0069127, + 0xD5B05AA9, + 0x93B4EA98, + 0x8D8FDDC1, + 0x86FFB7DC, + 0x90A6C08F, + 0x4DF435C9, + 0x34028492, + 0x36C3FAB4, + 0xD27C7026, + 0xC1D4DCB2, + 0x602646DE, + 0xC9751E76, + 0x3DBA37BD, + 0xF8FF9406, + 0xAD9E530E, + 0xE5DB382F, + 0x413001AE, + 0xB06A53ED, + 0x9027D831, + 0x179727B0, + 0x865A8918, + 0xDA3EDBEB, + 0xCF9B14ED, + 0x44CE6CBA, + 0xCED4BB1B, + 0xDB7F1447, + 0xE6CC254B, + 0x33205151, + 0x2BD7AF42, + 0x6FB8F401, + 0x378CD2BF, + 0x5983CA01, + 0xC64B92EC, + 0xF032EA15, + 0xD1721D03, + 0xF482D7CE, + 0x6E74FEF6, + 0xD55E702F, + 0x46980C82, + 0xB5A84031, + 0x900B1C9E, + 0x59E7C97F, + 0xBEC7E8F3, + 0x23A97A7E, + 0x36CC88BE, + 0x0F1D45B7, + 0xFF585AC5, + 0x4BD407B2, + 0x2B4154AA, + 0xCC8F6D7E, + 0xBF48E1D8, + 0x14CC5ED2, + 0x0F8037E0, + 0xA79715EE, + 0xF29BE328, + 0x06A1D58B, + 0xB7C5DA76, + 0xF550AA3D, + 0x8A1FBFF0, + 0xEB19CCB1, + 0xA313D55C, + 0xDA56C9EC, + 0x2EF29632, + 0x387FE8D7, + 0x6E3C0468, + 0x043E8F66, + 0x3F4860EE, + 0x12BF2D5B, + 0x0B7474D6, + 0xE694F91E, + 0x6DBE1159, + 0x74A3926F, + 0x12FEE5E4, + 0x38777CB6, + 0xA932DF8C, + 0xD8BEC4D0, + 0x73B931BA, + 0x3BC832B6, + 0x8D9DD300, + 0x741FA7BF, + 0x8AFC47ED, + 0x2576F693, + 0x6BA42466, + 0x3AAB639C, + 0x5AE4F568, + 0x3423B474, + 0x2BF1C978, + 0x238F16CB, + 0xE39D652D, + 0xE3FDB8BE, + 0xFC848AD9, + 0x22222E04, + 0xA4037C07, + 0x13EB57A8, + 0x1A23F0C7, + 0x3473FC64, + 0x6CEA306B, + 0x4BCBC886, + 0x2F8385DD, + 0xFA9D4B7F, + 0xA2C087E8, + 0x79683303, + 0xED5BDD3A, + 0x062B3CF5, + 0xB3A278A6, + 0x6D2A13F8, + 0x3F44F82D, + 0xDF310EE0, + 0x74AB6A36, + 0x4597E899, + 0xA0255DC1, + 0x64F31CC5, + 0x0846851D, + 0xF9AB4819, + 0x5DED7EA1, + 0xB1D510BD, + 0x7EE74D73, + 0xFAF36BC3, + 0x1ECFA268, + 0x359046F4, + 0xEB879F92, + 0x4009438B, + 0x481C6CD7, + 0x889A002E, + 0xD5EE382B, + 0xC9190DA6, + 0xFC026E47, + 0x9558E447, + 0x5677E9AA, + 0x9E3050E2, + 0x765694DF, + 0xC81F56E8, + 0x80B96E71, + 0x60C980DD, + 0x98EDD3DF, + 0xFFFFFFFF, + 0xFFFFFFFF, 
+ ], + generator: 2, + }, +}; + export class DiffieHellmanGroup { verifyError!: number; + #diffiehellman: DiffieHellman; - constructor(_name: string) { - notImplemented("crypto.DiffieHellmanGroup"); + constructor(name: string) { + if (!DH_GROUP_NAMES.includes(name)) { + throw new ERR_CRYPTO_UNKNOWN_DH_GROUP(); + } + this.#diffiehellman = new DiffieHellman( + Buffer.from(DH_GROUPS[name].prime), + DH_GROUPS[name].generator, + ); + this.verifyError = 0; } computeSecret(otherPublicKey: ArrayBufferView): Buffer; @@ -287,41 +1121,45 @@ export class DiffieHellmanGroup { outputEncoding: BinaryToTextEncoding, ): string; computeSecret( - _otherPublicKey: ArrayBufferView | string, - _inputEncoding?: BinaryToTextEncoding, - _outputEncoding?: BinaryToTextEncoding, + otherPublicKey: ArrayBufferView | string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: BinaryToTextEncoding, ): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.computeSecret"); + return this.#diffiehellman.computeSecret( + otherPublicKey, + inputEncoding, + outputEncoding, + ); } generateKeys(): Buffer; generateKeys(encoding: BinaryToTextEncoding): string; - generateKeys(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.generateKeys"); + generateKeys(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.generateKeys(encoding); } getGenerator(): Buffer; getGenerator(encoding: BinaryToTextEncoding): string; - getGenerator(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getGenerator"); + getGenerator(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getGenerator(encoding); } getPrime(): Buffer; getPrime(encoding: BinaryToTextEncoding): string; - getPrime(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrime"); + getPrime(encoding?: BinaryToTextEncoding): Buffer | string 
{ + return this.#diffiehellman.getPrime(encoding); } getPrivateKey(): Buffer; getPrivateKey(encoding: BinaryToTextEncoding): string; - getPrivateKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPrivateKey"); + getPrivateKey(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getPrivateKey(encoding); } getPublicKey(): Buffer; getPublicKey(encoding: BinaryToTextEncoding): string; - getPublicKey(_encoding?: BinaryToTextEncoding): Buffer | string { - notImplemented("crypto.DiffieHellman.prototype.getPublicKey"); + getPublicKey(encoding?: BinaryToTextEncoding): Buffer | string { + return this.#diffiehellman.getPublicKey(encoding); } } diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 44aba4d161..5e6b9378c6 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -836,6 +836,15 @@ export class ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY extends NodeError { } } +export class ERR_CRYPTO_UNKNOWN_DH_GROUP extends NodeError { + constructor() { + super( + "ERR_CRYPTO_UNKNOWN_DH_GROUP", + "Unknown DH group", + ); + } +} + export class ERR_CRYPTO_ENGINE_UNKNOWN extends NodeError { constructor(x: string) { super("ERR_CRYPTO_ENGINE_UNKNOWN", `Engine "${x}" was not found`); diff --git a/tools/node_compat/TODO.md b/tools/node_compat/TODO.md index a94dbc090a..48a51bbd64 100644 --- a/tools/node_compat/TODO.md +++ b/tools/node_compat/TODO.md @@ -3,7 +3,7 @@ NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. 
-Total: 2934 +Total: 2933 - [abort/test-abort-backtrace.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-backtrace.js) - [abort/test-abort-fatal-error.js](https://github.com/nodejs/node/tree/v18.12.1/test/abort/test-abort-fatal-error.js) @@ -474,7 +474,6 @@ Total: 2934 - [parallel/test-crypto-dh-modp2.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-modp2.js) - [parallel/test-crypto-dh-odd-key.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-odd-key.js) - [parallel/test-crypto-dh-padding.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-padding.js) -- [parallel/test-crypto-dh-shared.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-shared.js) - [parallel/test-crypto-dh-stateless.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-dh-stateless.js) - [parallel/test-crypto-domain.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domain.js) - [parallel/test-crypto-domains.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-crypto-domains.js) From 27303ef688ae56008aafab513d84e39096a51e34 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Tue, 16 May 2023 01:24:41 +0200 Subject: [PATCH 176/320] refactor(ext/http): simpler ws server in http_next (#19133) Merges `op_http_upgrade_next` and `op_ws_server_create`, significantly simplifying websocket construction in ext/http (next), and removing one JS -> Rust call. Also WS server now doesn't bypass `HttpPropertyExtractor`. 
--- ext/http/00_serve.js | 15 ++++++--------- ext/http/http_next.rs | 19 +++++-------------- ext/http/lib.rs | 2 +- ext/net/raw.rs | 25 ------------------------- ext/websocket/lib.rs | 18 ------------------ 5 files changed, 12 insertions(+), 67 deletions(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index 1746b1d47c..69ad885660 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -49,8 +49,6 @@ const { } = primordials; const { - op_http_wait, - op_http_upgrade_next, op_http_get_request_headers, op_http_get_request_method_and_url, op_http_read_request_body, @@ -63,10 +61,9 @@ const { op_http_set_response_header, op_http_set_response_headers, op_http_upgrade_raw, - op_ws_server_create, + op_http_upgrade_websocket_next, + op_http_wait, } = core.generateAsyncOpHandler( - "op_http_wait", - "op_http_upgrade_next", "op_http_get_request_headers", "op_http_get_request_method_and_url", "op_http_read_request_body", @@ -79,7 +76,8 @@ const { "op_http_set_response_header", "op_http_set_response_headers", "op_http_upgrade_raw", - "op_ws_server_create", + "op_http_upgrade_websocket_next", + "op_http_wait", ); const _upgraded = Symbol("_upgraded"); @@ -208,12 +206,11 @@ class InnerRequest { // Start the upgrade in the background. 
(async () => { try { - // Returns the connection and extra bytes, which we can pass directly to op_ws_server_create - const upgrade = await op_http_upgrade_next( + // Returns the upgraded websocket connection + const wsRid = await op_http_upgrade_websocket_next( slabId, response.headerList, ); - const wsRid = op_ws_server_create(upgrade[0], upgrade[1]); // We have to wait for the go-ahead signal await goAhead; diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index eaa19a89d0..a986de7f3a 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -29,10 +29,9 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; -use deno_core::ZeroCopyBuf; use deno_net::ops_tls::TlsStream; -use deno_net::raw::put_network_stream_resource; use deno_net::raw::NetworkStream; +use deno_websocket::ws_create_server_stream; use fly_accept_encoding::Encoding; use http::header::ACCEPT_ENCODING; use http::header::CACHE_CONTROL; @@ -314,11 +313,11 @@ pub fn op_http_upgrade_raw( } #[op] -pub async fn op_http_upgrade_next( +pub async fn op_http_upgrade_websocket_next( state: Rc>, index: u32, headers: Vec<(ByteString, ByteString)>, -) -> Result<(ResourceId, ZeroCopyBuf), AnyError> { +) -> Result { // Stage 1: set the respnse to 101 Switching Protocols and send it let upgrade = with_http_mut(index, |http| { // Manually perform the upgrade. 
We're peeking into hyper's underlying machinery here a bit @@ -343,17 +342,9 @@ pub async fn op_http_upgrade_next( // Stage 2: wait for the request to finish upgrading let upgraded = upgrade.await?; - // Stage 3: return the extracted raw network stream + // Stage 3: take the extracted raw network stream and upgrade it to a websocket, then return it let (stream, bytes) = extract_network_stream(upgraded); - - // We're allocating for those extra bytes, but they are probably going to be empty most of the time - Ok(( - put_network_stream_resource( - &mut state.borrow_mut().resource_table, - stream, - )?, - ZeroCopyBuf::from(bytes.to_vec()), - )) + ws_create_server_stream(&mut state.borrow_mut(), stream, bytes) } #[op(fast)] diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 7a1a93f805..1ed1e60b78 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -116,8 +116,8 @@ deno_core::extension!( http_next::op_http_set_response_header, http_next::op_http_set_response_headers, http_next::op_http_track, + http_next::op_http_upgrade_websocket_next, http_next::op_http_upgrade_raw, - http_next::op_http_upgrade_next, http_next::op_http_wait, ], esm = ["00_serve.js", "01_http.js"], diff --git a/ext/net/raw.rs b/ext/net/raw.rs index 3f230a08ba..0c92c46707 100644 --- a/ext/net/raw.rs +++ b/ext/net/raw.rs @@ -260,31 +260,6 @@ pub fn take_network_stream_resource( Err(bad_resource_id()) } -/// Inserts a raw stream (back?) into the resource table and returns a resource ID. This can then be used to create raw connection -/// objects on the JS side. 
-pub fn put_network_stream_resource( - resource_table: &mut ResourceTable, - stream: NetworkStream, -) -> Result { - let res = match stream { - NetworkStream::Tcp(conn) => { - let (r, w) = conn.into_split(); - resource_table.add(TcpStreamResource::new((r, w))) - } - NetworkStream::Tls(conn) => { - let (r, w) = conn.into_split(); - resource_table.add(TlsStreamResource::new((r, w))) - } - #[cfg(unix)] - NetworkStream::Unix(conn) => { - let (r, w) = conn.into_split(); - resource_table.add(UnixStreamResource::new((r, w))) - } - }; - - Ok(res) -} - /// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). /// This method will extract a stream from the resource table and return it, unwrapped. pub fn take_network_stream_listener_resource( diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index a002b774ce..ccda33cfc0 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -16,7 +16,6 @@ use deno_core::Resource; use deno_core::ResourceId; use deno_core::StringOrBuffer; use deno_core::ZeroCopyBuf; -use deno_net::raw::take_network_stream_resource; use deno_net::raw::NetworkStream; use deno_tls::create_client_config; use deno_tls::RootCertStoreProvider; @@ -367,22 +366,6 @@ pub fn ws_create_server_stream( Ok(rid) } -#[op] -pub fn op_ws_server_create( - state: &mut OpState, - conn: ResourceId, - extra_bytes: &[u8], -) -> Result { - let network_stream = - take_network_stream_resource(&mut state.resource_table, conn)?; - // Copying the extra bytes, but unlikely this will account for much - ws_create_server_stream( - state, - network_stream, - Bytes::from(extra_bytes.to_vec()), - ) -} - #[op] pub async fn op_ws_send_binary( state: Rc>, @@ -534,7 +517,6 @@ deno_core::extension!(deno_websocket, op_ws_send_text, op_ws_send_ping, op_ws_send_pong, - op_ws_server_create, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { From ab9a17eeee01a3b04fac2bf583c812179f410c91 Mon 
Sep 17 00:00:00 2001 From: Levente Kurusa Date: Tue, 16 May 2023 03:21:59 +0200 Subject: [PATCH 177/320] fix(deno/upgrade): allow --version vX.Y.Z (#19139) Instead of just supporting X.Y.Z, support vX.Y.Z. Otherwise we'll try to download something like vvX.Y.Z --- cli/tools/upgrade.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index cbd924755b..48e8b0ade3 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -294,6 +294,10 @@ pub async fn upgrade( let install_version = match upgrade_flags.version { Some(passed_version) => { let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$"); + let passed_version = passed_version + .strip_prefix('v') + .unwrap_or(&passed_version) + .to_string(); if upgrade_flags.canary && !re_hash.is_match(&passed_version) { bail!("Invalid commit hash passed"); @@ -317,9 +321,9 @@ pub async fn upgrade( { log::info!("Version {} is already installed", crate::version::deno()); return Ok(()); - } else { - passed_version } + + passed_version } None => { let latest_version = if upgrade_flags.canary { @@ -363,7 +367,7 @@ pub async fn upgrade( let download_url = if upgrade_flags.canary { if env!("TARGET") == "aarch64-apple-darwin" { - bail!("Canary builds are not available for M1"); + bail!("Canary builds are not available for M1/M2"); } format!( From 78fbc3f3f695610083d07a55ef2a116e0bf04a10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 16 May 2023 05:16:24 +0200 Subject: [PATCH 178/320] fix(npm): add performance.markResourceTiming sham (#19123) This commit shams "performance.markResourceTiming" API by using a noop function. It is done to provide compatibility with "npm:undici" package. We should look into actually implementing this API properly, but I wanted to unblock support for "undici" and "astro" for now. 
Ref https://github.com/denoland/deno/issues/19065 --- ext/node/analyze.rs | 3 ++- ext/node/polyfills/01_require.js | 2 +- ext/node/polyfills/02_init.js | 1 + ext/node/polyfills/perf_hooks.ts | 3 +++ 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ext/node/analyze.rs b/ext/node/analyze.rs index 6d32c68beb..2e5c2d15f5 100644 --- a/ext/node/analyze.rs +++ b/ext/node/analyze.rs @@ -32,6 +32,7 @@ static NODE_GLOBALS: &[&str] = &[ "setImmediate", "setInterval", "setTimeout", + "performance", ]; #[derive(Debug, Clone)] @@ -528,7 +529,7 @@ mod tests { "var clearTimeout = globalThis.clearTimeout;var console = globalThis.console;", "var global = globalThis.global;var process = globalThis.process;", "var setImmediate = globalThis.setImmediate;var setInterval = globalThis.setInterval;", - "var setTimeout = globalThis.setTimeout;\n", + "var setTimeout = globalThis.setTimeout;var performance = globalThis.performance;\n", "export const x = 1;" ), NODE_GLOBAL_THIS_NAME, diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index a8a70c2fca..7b91d12aa0 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -907,7 +907,7 @@ Module.prototype.require = function (id) { Module.wrapper = [ // We provide the non-standard APIs in the CommonJS wrapper // to avoid exposing them in global namespace. 
- "(function (exports, require, module, __filename, __dirname, globalThis) { const { Buffer, clearImmediate, clearInterval, clearTimeout, console, global, process, setImmediate, setInterval, setTimeout} = globalThis; var window = undefined; (function () {", + "(function (exports, require, module, __filename, __dirname, globalThis) { const { Buffer, clearImmediate, clearInterval, clearTimeout, console, global, process, setImmediate, setInterval, setTimeout, performance} = globalThis; var window = undefined; (function () {", "\n}).call(this); })", ]; Module.wrap = function (script) { diff --git a/ext/node/polyfills/02_init.js b/ext/node/polyfills/02_init.js index b8070d50f7..3aef000601 100644 --- a/ext/node/polyfills/02_init.js +++ b/ext/node/polyfills/02_init.js @@ -34,6 +34,7 @@ function initialize( nodeGlobals.setImmediate = nativeModuleExports["timers"].setImmediate; nodeGlobals.setInterval = nativeModuleExports["timers"].setInterval; nodeGlobals.setTimeout = nativeModuleExports["timers"].setTimeout; + nodeGlobals.performance = nativeModuleExports["perf_hooks"].performance; // add a hidden global for the esm code to use in order to reliably // get node's globalThis diff --git a/ext/node/polyfills/perf_hooks.ts b/ext/node/polyfills/perf_hooks.ts index ac74c10f81..30c50d3637 100644 --- a/ext/node/polyfills/perf_hooks.ts +++ b/ext/node/polyfills/perf_hooks.ts @@ -22,6 +22,8 @@ const performance: timerify: any; // deno-lint-ignore no-explicit-any timeOrigin: any; + // deno-lint-ignore no-explicit-any + markResourceTiming: any; } = { clearMarks: (markName: string) => shimPerformance.clearMarks(markName), eventLoopUtilization: () => @@ -50,6 +52,7 @@ const performance: timerify: () => notImplemented("timerify from performance"), // deno-lint-ignore no-explicit-any timeOrigin: (shimPerformance as any).timeOrigin, + markResourceTiming: () => {}, // @ts-ignore waiting on update in `deno`, but currently this is // a circular dependency toJSON: () => 
shimPerformance.toJSON(), From 1c74b41855b85c9ec2ee1d83ac0f6b04e1461788 Mon Sep 17 00:00:00 2001 From: Lenni Date: Tue, 16 May 2023 14:30:16 +0200 Subject: [PATCH 179/320] docs: fix typos (#19118) --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- cli/tests/integration/compile_tests.rs | 2 +- cli/tools/compile.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 44c670d275..e6e5a41f03 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ # Remaining Node Tests -NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. +NOTE: This file should not be manually edited. Please edit `cli/tests/node_compat/config.json` and run `deno task setup` in `tools/node_compat` dir instead. Total: 2934 diff --git a/tools/node_compat/setup.ts b/tools/node_compat/setup.ts index 5bd5a5ba56..132acfe116 100755 --- a/tools/node_compat/setup.ts +++ b/tools/node_compat/setup.ts @@ -76,7 +76,7 @@ async function updateToDo() { await file.write(encoder.encode(` # Remaining Node Tests -NOTE: This file should not be manually edited. Please edit 'cli/tests/node_compat/config.json' and run 'tools/node_compat/setup.ts' instead. +NOTE: This file should not be manually edited. Please edit \`cli/tests/node_compat/config.json\` and run \`deno task setup\` in \`tools/node_compat\` dir instead. Total: ${missingTests.length} @@ -142,7 +142,7 @@ async function copyTests() { // Copyright Joyent and Node contributors. All rights reserved. MIT license. // Taken from Node ${NODE_VERSION} -// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually +// This file is automatically generated by \`tools/node_compat/setup.ts\`. Do not modify this file manually. 
`), ); From 98461e2559a823c5042346db7edf811985c4e9a5 Mon Sep 17 00:00:00 2001 From: Aapo Alasuutari Date: Fri, 2 Jun 2023 16:38:36 +0300 Subject: [PATCH 287/320] chore(tools): Add core import-map (#19346) Adds an import map of the core and ext JavaScript files. This was created manually but a script to create one automatically wouldn't be too much of a big thing either. This should make working on especially the Node polyfills much more pleasant, as it gives you feedback on if your imports are correct. Unfortunately the TypeScript declaration files of some of the internal modules clash with the import map and override the data from the actual files with data from the declaration files. Those do not contain all exports nor is their data always up to date. Still, this is much better than not having one. --- tools/core_import_map.json | 244 +++++++++++++++++++++++++++++++++++++ 1 file changed, 244 insertions(+) create mode 100644 tools/core_import_map.json diff --git a/tools/core_import_map.json b/tools/core_import_map.json new file mode 100644 index 0000000000..c4c4b99f2a --- /dev/null +++ b/tools/core_import_map.json @@ -0,0 +1,244 @@ +{ + "imports": { + "ext:deno_broadcast_channel/01_broadcast_channel.js": "../ext/broadcast_channel/01_broadcast_channel.js", + "ext:deno_cache/01_cache.js": "../ext/cache/01_cache.js", + "ext:deno_console/01_console.js": "../ext/console/01_console.js", + "ext:deno_crypto/00_crypto.js": "../ext/crypto/00_crypto.js", + "ext:deno_fetch/20_headers.js": "../ext/fetch/20_headers.js", + "ext:deno_fetch/21_formdata.js": "../ext/fetch/21_formdata.js", + "ext:deno_fetch/22_body.js": "../ext/fetch/22_body.js", + "ext:deno_fetch/22_http_client.js": "../ext/fetch/22_http_client.js", + "ext:deno_fetch/23_request.js": "../ext/fetch/23_request.js", + "ext:deno_fetch/23_response.js": "../ext/fetch/23_response.js", + "ext:deno_fetch/26_fetch.js": "../ext/fetch/26_fetch.js", + "ext:deno_ffi/00_ffi.js": "../ext/ffi/00_ffi.js", + "ext:deno_fs/30_fs.js": 
"../ext/fs/30_fs.js", + "ext:deno_http/00_serve.js": "../ext/http/00_serve.js", + "ext:deno_http/01_http.js": "../ext/http/01_http.js", + "ext:deno_io/12_io.js": "../ext/io/12_io.js", + "ext:deno_kv/01_db.ts": "../ext/kv/01_db.ts", + "ext:deno_net/01_net.js": "../ext/net/01_net.js", + "ext:deno_net/02_tls.js": "../ext/net/02_tls.js", + "ext:deno_node/_events.d.ts": "../ext/node/polyfills/_events.d.ts", + "ext:deno_node/_fs/_fs_close.ts": "../ext/node/polyfills/_fs/_fs_close.ts", + "ext:deno_node/_fs/_fs_common.ts": "../ext/node/polyfills/_fs/_fs_common.ts", + "ext:deno_node/_fs/_fs_constants.ts": "../ext/node/polyfills/_fs/_fs_constants.ts", + "ext:deno_node/_fs/_fs_dir.ts": "../ext/node/polyfills/_fs/_fs_dir.ts", + "ext:deno_node/_fs/_fs_dirent.ts": "../ext/node/polyfills/_fs/_fs_dirent.ts", + "ext:deno_node/_fs/_fs_exists.ts": "../ext/node/polyfills/_fs/_fs_exists.ts", + "ext:deno_node/_fs/_fs_lstat.ts": "../ext/node/polyfills/_fs/_fs_lstat.ts", + "ext:deno_node/_fs/_fs_mkdir.ts": "../ext/node/polyfills/_fs/_fs_mkdir.ts", + "ext:deno_node/_fs/_fs_open.ts": "../ext/node/polyfills/_fs/_fs_open.ts", + "ext:deno_node/_fs/_fs_read.ts": "../ext/node/polyfills/_fs/_fs_read.ts", + "ext:deno_node/_fs/_fs_stat.ts": "../ext/node/polyfills/_fs/_fs_stat.ts", + "ext:deno_node/_fs/_fs_watch.ts": "../ext/node/polyfills/_fs/_fs_watch.ts", + "ext:deno_node/_fs/_fs_write.mjs": "../ext/node/polyfills/_fs/_fs_write.mjs", + "ext:deno_node/_fs/_fs_writev.mjs": "../ext/node/polyfills/_fs/_fs_writev.mjs", + "ext:deno_node/_global.d.ts": "../ext/node/polyfills/_global.d.ts", + "ext:deno_node/_http_agent.mjs": "../ext/node/polyfills/_http_agent.mjs", + "ext:deno_node/_http_common.ts": "../ext/node/polyfills/_http_common.ts", + "ext:deno_node/_http_outgoing.ts": "../ext/node/polyfills/_http_outgoing.ts", + "ext:deno_node/_next_tick.ts": "../ext/node/polyfills/_next_tick.ts", + "ext:deno_node/_process/exiting.ts": "../ext/node/polyfills/_process/exiting.ts", + 
"ext:deno_node/_process/process.ts": "../ext/node/polyfills/_process/process.ts", + "ext:deno_node/_process/streams.mjs": "../ext/node/polyfills/_process/streams.mjs", + "ext:deno_node/_readline_shared_types.d.ts": "../ext/node/polyfills/_readline_shared_types.d.ts", + "ext:deno_node/_stream.d.ts": "../ext/node/polyfills/_stream.d.ts", + "ext:deno_node/_stream.mjs": "../ext/node/polyfills/_stream.mjs", + "ext:deno_node/_tls_common.ts": "../ext/node/polyfills/_tls_common.ts", + "ext:deno_node/_util/asserts.ts": "../ext/node/polyfills/_util/asserts.ts", + "ext:deno_node/_util/async.ts": "../ext/node/polyfills/_util/async.ts", + "ext:deno_node/_util/os.ts": "../ext/node/polyfills/_util/os.ts", + "ext:deno_node/_utils.ts": "../ext/node/polyfills/_utils.ts", + "ext:deno_node/_zlib_binding.mjs": "../ext/node/polyfills/_zlib_binding.mjs", + "ext:deno_node/00_globals.js": "../ext/node/polyfills/00_globals.js", + "ext:deno_node/01_require.js": "../ext/node/polyfills/01_require.js", + "ext:deno_node/assert.ts": "../ext/node/polyfills/assert.ts", + "ext:deno_node/assert/strict.ts": "../ext/node/polyfills/assert/strict.ts", + "ext:deno_node/async_hooks.ts": "../ext/node/polyfills/async_hooks.ts", + "ext:deno_node/buffer.ts": "../ext/node/polyfills/buffer.ts", + "ext:deno_node/child_process.ts": "../ext/node/polyfills/child_process.ts", + "ext:deno_node/cluster.ts": "../ext/node/polyfills/cluster.ts", + "ext:deno_node/console.ts": "../ext/node/polyfills/console.ts", + "ext:deno_node/constants.ts": "../ext/node/polyfills/constants.ts", + "ext:deno_node/crypto.ts": "../ext/node/polyfills/crypto.ts", + "ext:deno_node/dgram.ts": "../ext/node/polyfills/dgram.ts", + "ext:deno_node/diagnostics_channel.ts": "../ext/node/polyfills/diagnostics_channel.ts", + "ext:deno_node/dns.ts": "../ext/node/polyfills/dns.ts", + "ext:deno_node/dns/promises.ts": "../ext/node/polyfills/dns/promises.ts", + "ext:deno_node/domain.ts": "../ext/node/polyfills/domain.ts", + "ext:deno_node/events.ts": 
"../ext/node/polyfills/events.ts", + "ext:deno_node/fs.ts": "../ext/node/polyfills/fs.ts", + "ext:deno_node/fs/promises.ts": "../ext/node/polyfills/fs/promises.ts", + "ext:deno_node/http.ts": "../ext/node/polyfills/http.ts", + "ext:deno_node/http2.ts": "../ext/node/polyfills/http2.ts", + "ext:deno_node/https.ts": "../ext/node/polyfills/https.ts", + "ext:deno_node/inspector.ts": "../ext/node/polyfills/inspector.ts", + "ext:deno_node/internal_binding/_libuv_winerror.ts": "../ext/node/polyfills/internal_binding/_libuv_winerror.ts", + "ext:deno_node/internal_binding/_listen.ts": "../ext/node/polyfills/internal_binding/_listen.ts", + "ext:deno_node/internal_binding/_node.ts": "../ext/node/polyfills/internal_binding/_node.ts", + "ext:deno_node/internal_binding/_timingSafeEqual.ts": "../ext/node/polyfills/internal_binding/_timingSafeEqual.ts", + "ext:deno_node/internal_binding/_utils.ts": "../ext/node/polyfills/internal_binding/_utils.ts", + "ext:deno_node/internal_binding/ares.ts": "../ext/node/polyfills/internal_binding/ares.ts", + "ext:deno_node/internal_binding/async_wrap.ts": "../ext/node/polyfills/internal_binding/async_wrap.ts", + "ext:deno_node/internal_binding/buffer.ts": "../ext/node/polyfills/internal_binding/buffer.ts", + "ext:deno_node/internal_binding/cares_wrap.ts": "../ext/node/polyfills/internal_binding/cares_wrap.ts", + "ext:deno_node/internal_binding/connection_wrap.ts": "../ext/node/polyfills/internal_binding/connection_wrap.ts", + "ext:deno_node/internal_binding/constants.ts": "../ext/node/polyfills/internal_binding/constants.ts", + "ext:deno_node/internal_binding/crypto.ts": "../ext/node/polyfills/internal_binding/crypto.ts", + "ext:deno_node/internal_binding/handle_wrap.ts": "../ext/node/polyfills/internal_binding/handle_wrap.ts", + "ext:deno_node/internal_binding/mod.ts": "../ext/node/polyfills/internal_binding/mod.ts", + "ext:deno_node/internal_binding/pipe_wrap.ts": "../ext/node/polyfills/internal_binding/pipe_wrap.ts", + 
"ext:deno_node/internal_binding/stream_wrap.ts": "../ext/node/polyfills/internal_binding/stream_wrap.ts", + "ext:deno_node/internal_binding/string_decoder.ts": "../ext/node/polyfills/internal_binding/string_decoder.ts", + "ext:deno_node/internal_binding/symbols.ts": "../ext/node/polyfills/internal_binding/symbols.ts", + "ext:deno_node/internal_binding/tcp_wrap.ts": "../ext/node/polyfills/internal_binding/tcp_wrap.ts", + "ext:deno_node/internal_binding/types.ts": "../ext/node/polyfills/internal_binding/types.ts", + "ext:deno_node/internal_binding/udp_wrap.ts": "../ext/node/polyfills/internal_binding/udp_wrap.ts", + "ext:deno_node/internal_binding/util.ts": "../ext/node/polyfills/internal_binding/util.ts", + "ext:deno_node/internal_binding/uv.ts": "../ext/node/polyfills/internal_binding/uv.ts", + "ext:deno_node/internal/assert.mjs": "../ext/node/polyfills/internal/assert.mjs", + "ext:deno_node/internal/async_hooks.ts": "../ext/node/polyfills/internal/async_hooks.ts", + "ext:deno_node/internal/buffer.mjs": "../ext/node/polyfills/internal/buffer.mjs", + "ext:deno_node/internal/child_process.ts": "../ext/node/polyfills/internal/child_process.ts", + "ext:deno_node/internal/cli_table.ts": "../ext/node/polyfills/internal/cli_table.ts", + "ext:deno_node/internal/constants.ts": "../ext/node/polyfills/internal/constants.ts", + "ext:deno_node/internal/crypto/_keys.ts": "../ext/node/polyfills/internal/crypto/_keys.ts", + "ext:deno_node/internal/crypto/_randomBytes.ts": "../ext/node/polyfills/internal/crypto/_randomBytes.ts", + "ext:deno_node/internal/crypto/_randomFill.ts": "../ext/node/polyfills/internal/crypto/_randomFill.ts", + "ext:deno_node/internal/crypto/_randomInt.ts": "../ext/node/polyfills/internal/crypto/_randomInt.ts", + "ext:deno_node/internal/crypto/certificate.ts": "../ext/node/polyfills/internal/crypto/certificate.ts", + "ext:deno_node/internal/crypto/cipher.ts": "../ext/node/polyfills/internal/crypto/cipher.ts", + "ext:deno_node/internal/crypto/constants.ts": 
"../ext/node/polyfills/internal/crypto/constants.ts", + "ext:deno_node/internal/crypto/diffiehellman.ts": "../ext/node/polyfills/internal/crypto/diffiehellman.ts", + "ext:deno_node/internal/crypto/hash.ts": "../ext/node/polyfills/internal/crypto/hash.ts", + "ext:deno_node/internal/crypto/hkdf.ts": "../ext/node/polyfills/internal/crypto/hkdf.ts", + "ext:deno_node/internal/crypto/keygen.ts": "../ext/node/polyfills/internal/crypto/keygen.ts", + "ext:deno_node/internal/crypto/keys.ts": "../ext/node/polyfills/internal/crypto/keys.ts", + "ext:deno_node/internal/crypto/pbkdf2.ts": "../ext/node/polyfills/internal/crypto/pbkdf2.ts", + "ext:deno_node/internal/crypto/random.ts": "../ext/node/polyfills/internal/crypto/random.ts", + "ext:deno_node/internal/crypto/scrypt.ts": "../ext/node/polyfills/internal/crypto/scrypt.ts", + "ext:deno_node/internal/crypto/sig.ts": "../ext/node/polyfills/internal/crypto/sig.ts", + "ext:deno_node/internal/crypto/types.ts": "../ext/node/polyfills/internal/crypto/types.ts", + "ext:deno_node/internal/crypto/util.ts": "../ext/node/polyfills/internal/crypto/util.ts", + "ext:deno_node/internal/crypto/x509.ts": "../ext/node/polyfills/internal/crypto/x509.ts", + "ext:deno_node/internal/dgram.ts": "../ext/node/polyfills/internal/dgram.ts", + "ext:deno_node/internal/dns/promises.ts": "../ext/node/polyfills/internal/dns/promises.ts", + "ext:deno_node/internal/dns/utils.ts": "../ext/node/polyfills/internal/dns/utils.ts", + "ext:deno_node/internal/error_codes.ts": "../ext/node/polyfills/internal/error_codes.ts", + "ext:deno_node/internal/errors.ts": "../ext/node/polyfills/internal/errors.ts", + "ext:deno_node/internal/event_target.mjs": "../ext/node/polyfills/internal/event_target.mjs", + "ext:deno_node/internal/fixed_queue.ts": "../ext/node/polyfills/internal/fixed_queue.ts", + "ext:deno_node/internal/fs/utils.mjs": "../ext/node/polyfills/internal/fs/utils.mjs", + "ext:deno_node/internal/hide_stack_frames.ts": 
"../ext/node/polyfills/internal/hide_stack_frames.ts", + "ext:deno_node/internal/http.ts": "../ext/node/polyfills/internal/http.ts", + "ext:deno_node/internal/net.ts": "../ext/node/polyfills/internal/net.ts", + "ext:deno_node/internal/normalize_encoding.mjs": "../ext/node/polyfills/internal/normalize_encoding.mjs", + "ext:deno_node/internal/options.ts": "../ext/node/polyfills/internal/options.ts", + "ext:deno_node/internal/primordials.mjs": "../ext/node/polyfills/internal/primordials.mjs", + "ext:deno_node/internal/process/per_thread.mjs": "../ext/node/polyfills/internal/process/per_thread.mjs", + "ext:deno_node/internal/querystring.ts": "../ext/node/polyfills/internal/querystring.ts", + "ext:deno_node/internal/readline/callbacks.mjs": "../ext/node/polyfills/internal/readline/callbacks.mjs", + "ext:deno_node/internal/readline/emitKeypressEvents.mjs": "../ext/node/polyfills/internal/readline/emitKeypressEvents.mjs", + "ext:deno_node/internal/readline/interface.mjs": "../ext/node/polyfills/internal/readline/interface.mjs", + "ext:deno_node/internal/readline/promises.mjs": "../ext/node/polyfills/internal/readline/promises.mjs", + "ext:deno_node/internal/readline/symbols.mjs": "../ext/node/polyfills/internal/readline/symbols.mjs", + "ext:deno_node/internal/readline/utils.mjs": "../ext/node/polyfills/internal/readline/utils.mjs", + "ext:deno_node/internal/streams/add-abort-signal.mjs": "../ext/node/polyfills/internal/streams/add-abort-signal.mjs", + "ext:deno_node/internal/streams/buffer_list.mjs": "../ext/node/polyfills/internal/streams/buffer_list.mjs", + "ext:deno_node/internal/streams/destroy.mjs": "../ext/node/polyfills/internal/streams/destroy.mjs", + "ext:deno_node/internal/streams/duplex.mjs": "../ext/node/polyfills/internal/streams/duplex.mjs", + "ext:deno_node/internal/streams/end-of-stream.mjs": "../ext/node/polyfills/internal/streams/end-of-stream.mjs", + "ext:deno_node/internal/streams/lazy_transform.mjs": 
"../ext/node/polyfills/internal/streams/lazy_transform.mjs", + "ext:deno_node/internal/streams/passthrough.mjs": "../ext/node/polyfills/internal/streams/passthrough.mjs", + "ext:deno_node/internal/streams/readable.mjs": "../ext/node/polyfills/internal/streams/readable.mjs", + "ext:deno_node/internal/streams/state.mjs": "../ext/node/polyfills/internal/streams/state.mjs", + "ext:deno_node/internal/streams/transform.mjs": "../ext/node/polyfills/internal/streams/transform.mjs", + "ext:deno_node/internal/streams/utils.mjs": "../ext/node/polyfills/internal/streams/utils.mjs", + "ext:deno_node/internal/streams/writable.mjs": "../ext/node/polyfills/internal/streams/writable.mjs", + "ext:deno_node/internal/test/binding.ts": "../ext/node/polyfills/internal/test/binding.ts", + "ext:deno_node/internal/timers.mjs": "../ext/node/polyfills/internal/timers.mjs", + "ext:deno_node/internal/url.ts": "../ext/node/polyfills/internal/url.ts", + "ext:deno_node/internal/util.mjs": "../ext/node/polyfills/internal/util.mjs", + "ext:deno_node/internal/util/debuglog.ts": "../ext/node/polyfills/internal/util/debuglog.ts", + "ext:deno_node/internal/util/inspect.mjs": "../ext/node/polyfills/internal/util/inspect.mjs", + "ext:deno_node/internal/util/types.ts": "../ext/node/polyfills/internal/util/types.ts", + "ext:deno_node/internal/validators.mjs": "../ext/node/polyfills/internal/validators.mjs", + "ext:deno_node/net.ts": "../ext/node/polyfills/net.ts", + "ext:deno_node/os.ts": "../ext/node/polyfills/os.ts", + "ext:deno_node/path.ts": "../ext/node/polyfills/path.ts", + "ext:deno_node/path/_constants.ts": "../ext/node/polyfills/path/_constants.ts", + "ext:deno_node/path/_interface.ts": "../ext/node/polyfills/path/_interface.ts", + "ext:deno_node/path/_posix.ts": "../ext/node/polyfills/path/_posix.ts", + "ext:deno_node/path/_util.ts": "../ext/node/polyfills/path/_util.ts", + "ext:deno_node/path/_win32.ts": "../ext/node/polyfills/path/_win32.ts", + "ext:deno_node/path/mod.ts": 
"../ext/node/polyfills/path/mod.ts", + "ext:deno_node/path/posix.ts": "../ext/node/polyfills/path/posix.ts", + "ext:deno_node/path/separator.ts": "../ext/node/polyfills/path/separator.ts", + "ext:deno_node/path/win32.ts": "../ext/node/polyfills/path/win32.ts", + "ext:deno_node/perf_hooks.ts": "../ext/node/polyfills/perf_hooks.ts", + "ext:deno_node/process.ts": "../ext/node/polyfills/process.ts", + "ext:deno_node/punycode.ts": "../ext/node/polyfills/punycode.ts", + "ext:deno_node/querystring.ts": "../ext/node/polyfills/querystring.ts", + "ext:deno_node/readline.ts": "../ext/node/polyfills/readline.ts", + "ext:deno_node/readline/promises.ts": "../ext/node/polyfills/readline/promises.ts", + "ext:deno_node/repl.ts": "../ext/node/polyfills/repl.ts", + "ext:deno_node/stream.ts": "../ext/node/polyfills/stream.ts", + "ext:deno_node/stream/consumers.mjs": "../ext/node/polyfills/stream/consumers.mjs", + "ext:deno_node/stream/promises.mjs": "../ext/node/polyfills/stream/promises.mjs", + "ext:deno_node/stream/web.ts": "../ext/node/polyfills/stream/web.ts", + "ext:deno_node/string_decoder.ts": "../ext/node/polyfills/string_decoder.ts", + "ext:deno_node/sys.ts": "../ext/node/polyfills/sys.ts", + "ext:deno_node/timers.ts": "../ext/node/polyfills/timers.ts", + "ext:deno_node/timers/promises.ts": "../ext/node/polyfills/timers/promises.ts", + "ext:deno_node/tls.ts": "../ext/node/polyfills/tls.ts", + "ext:deno_node/tty.ts": "../ext/node/polyfills/tty.ts", + "ext:deno_node/url.ts": "../ext/node/polyfills/url.ts", + "ext:deno_node/util.ts": "../ext/node/polyfills/util.ts", + "ext:deno_node/util/types.ts": "../ext/node/polyfills/util/types.ts", + "ext:deno_node/v8.ts": "../ext/node/polyfills/v8.ts", + "ext:deno_node/vm.ts": "../ext/node/polyfills/vm.ts", + "ext:deno_node/wasi.ts": "../ext/node/polyfills/wasi.ts", + "ext:deno_node/worker_threads.ts": "../ext/node/polyfills/worker_threads.ts", + "ext:deno_node/zlib.ts": "../ext/node/polyfills/zlib.ts", + "ext:deno_url/00_url.js": 
"../ext/url/00_url.js", + "ext:deno_url/01_urlpattern.js": "../ext/url/01_urlpattern.js", + "ext:deno_web/00_infra.js": "../ext/web/00_infra.js", + "ext:deno_web/01_dom_exception.js": "../ext/web/01_dom_exception.js", + "ext:deno_web/01_mimesniff.js": "../ext/web/01_mimesniff.js", + "ext:deno_web/02_event.js": "../ext/web/02_event.js", + "ext:deno_web/02_structured_clone.js": "../ext/web/02_structured_clone.js", + "ext:deno_web/02_timers.js": "../ext/web/02_timers.js", + "ext:deno_web/03_abort_signal.js": "../ext/web/03_abort_signal.js", + "ext:deno_web/04_global_interfaces.js": "../ext/web/04_global_interfaces.js", + "ext:deno_web/05_base64.js": "../ext/web/05_base64.js", + "ext:deno_web/06_streams.js": "../ext/web/06_streams.js", + "ext:deno_web/08_text_encoding.js": "../ext/web/08_text_encoding.js", + "ext:deno_web/09_file.js": "../ext/web/09_file.js", + "ext:deno_web/10_filereader.js": "../ext/web/10_filereader.js", + "ext:deno_web/12_location.js": "../ext/web/12_location.js", + "ext:deno_web/13_message_port.js": "../ext/web/13_message_port.js", + "ext:deno_web/14_compression.js": "../ext/web/14_compression.js", + "ext:deno_web/15_performance.js": "../ext/web/15_performance.js", + "ext:deno_webidl/00_webidl.js": "../ext/webidl/00_webidl.js", + "ext:deno_websocket/01_websocket.js": "../ext/websocket/01_websocket.js", + "ext:deno_websocket/02_websocketstream.js": "../ext/websocket/02_websocketstream.js", + "ext:deno_webstorage/01_webstorage.js": "../ext/webstorage/01_webstorage.js", + "ext:runtime/01_errors.js": "../runtime/js/01_errors.js", + "ext:runtime/01_version.ts": "../runtime/js/01_version.ts", + "ext:runtime/06_util.js": "../runtime/js/06_util.js", + "ext:runtime/10_permissions.js": "../runtime/js/10_permissions.js", + "ext:runtime/11_workers.js": "../runtime/js/11_workers.js", + "ext:runtime/13_buffer.js": "../runtime/js/13_buffer.js", + "ext:runtime/30_os.js": "../runtime/js/30_os.js", + "ext:runtime/40_fs_events.js": "../runtime/js/40_fs_events.js", + 
"ext:runtime/40_http.js": "../runtime/js/40_http.js", + "ext:runtime/40_process.js": "../runtime/js/40_process.js", + "ext:runtime/40_signals.js": "../runtime/js/40_signals.js", + "ext:runtime/40_tty.js": "../runtime/js/40_tty.js", + "ext:runtime/41_prompt.js": "../runtime/js/41_prompt.js", + "ext:runtime/90_deno_ns.js": "../runtime/js/90_deno_ns.js", + "ext:runtime/98_global_scope.js": "../runtime/js/98_global_scope.js", + "ext:deno_node/_util/std_fmt_colors.ts": "../ext/node/polyfills/_util/std_fmt_colors.ts" + } +} From 25fdc7bf6c72967cec2bfbd3f18246d1515fce57 Mon Sep 17 00:00:00 2001 From: nasa Date: Fri, 2 Jun 2023 23:28:05 +0900 Subject: [PATCH 288/320] feat(node_compat): Added base implementation of FileHandle (#19294) ## WHY ref: https://github.com/denoland/deno/issues/19165 Node's fs/promises includes a FileHandle class, but deno does not. The open function in Node's fs/promises returns a FileHandle, which provides an IO interface to the file. However, deno's open function returns a resource id. ### deno ```js > const fs = await import("node:fs/promises"); undefined > const file3 = await fs.open("./README.md"); undefined > file3 3 > file3.read undefined Node: ``` ### Node ```js > const fs = await import("fs/promises"); undefined > const file3 = await fs.open("./tests/e2e_unit/testdata/file.txt"); undefined > file3 FileHandle { _events: [Object: null prototype] {}, _eventsCount: 0, _maxListeners: undefined, close: [Function: close], [Symbol(kCapture)]: false, [Symbol(kHandle)]: FileHandle {}, [Symbol(kFd)]: 24, [Symbol(kRefs)]: 1, [Symbol(kClosePromise)]: null } > file3.read [Function: read] ``` To be compatible with Node, deno's open function should also return a FileHandle. ## WHAT I have implemented the first step in adding a FileHandle. 
- Changed the return value of the open function to a FileHandle object - Implemented the readFile method in FileHandle - Add test code ## What to do next This PR is the first step in adding a FileHandle, and there are things that should be done next. - Add functionality equivalent to Node's FileHandle to FileHandle (currently there is only readFile) --------- Co-authored-by: Matt Mastracci --- cli/tests/integration/node_unit_tests.rs | 1 + cli/tests/unit_node/_fs/_fs_handle_test.ts | 20 ++++++++++++++ ext/node/lib.rs | 1 + ext/node/polyfills/_fs/_fs_open.ts | 24 +++++++++-------- ext/node/polyfills/_fs/_fs_readFile.ts | 25 +++++++++++------ ext/node/polyfills/internal/fs/handle.ts | 31 ++++++++++++++++++++++ 6 files changed, 83 insertions(+), 19 deletions(-) create mode 100644 cli/tests/unit_node/_fs/_fs_handle_test.ts create mode 100644 ext/node/polyfills/internal/fs/handle.ts diff --git a/cli/tests/integration/node_unit_tests.rs b/cli/tests/integration/node_unit_tests.rs index 1cd52f61da..f62c8761cf 100644 --- a/cli/tests/integration/node_unit_tests.rs +++ b/cli/tests/integration/node_unit_tests.rs @@ -25,6 +25,7 @@ util::unit_test_factory!( _fs_fsync_test = _fs / _fs_fsync_test, _fs_ftruncate_test = _fs / _fs_ftruncate_test, _fs_futimes_test = _fs / _fs_futimes_test, + _fs_handle_test = _fs / _fs_handle_test, _fs_link_test = _fs / _fs_link_test, _fs_lstat_test = _fs / _fs_lstat_test, _fs_mkdir_test = _fs / _fs_mkdir_test, diff --git a/cli/tests/unit_node/_fs/_fs_handle_test.ts b/cli/tests/unit_node/_fs/_fs_handle_test.ts new file mode 100644 index 0000000000..c1e5ef8713 --- /dev/null +++ b/cli/tests/unit_node/_fs/_fs_handle_test.ts @@ -0,0 +1,20 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+import * as path from "../../../../test_util/std/path/mod.ts"; +import { + assert, + assertEquals, +} from "../../../../test_util/std/testing/asserts.ts"; + +const moduleDir = path.dirname(path.fromFileUrl(import.meta.url)); +const testData = path.resolve(moduleDir, "testdata", "hello.txt"); + +Deno.test("readFileSuccess", async function () { + const fs = await import("node:fs/promises"); + const fileHandle = await fs.open(testData); + const data = await fileHandle.readFile(); + + assert(data instanceof Uint8Array); + assertEquals(new TextDecoder().decode(data as Uint8Array), "hello world"); + + Deno.close(fileHandle.fd); +}); diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 6ac64eb2eb..e77b3c0896 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -392,6 +392,7 @@ deno_core::extension!(deno_node, "internal/fixed_queue.ts", "internal/fs/streams.mjs", "internal/fs/utils.mjs", + "internal/fs/handle.ts", "internal/hide_stack_frames.ts", "internal/http.ts", "internal/idna.ts", diff --git a/ext/node/polyfills/_fs/_fs_open.ts b/ext/node/polyfills/_fs/_fs_open.ts index 135520591d..2e29f3df10 100644 --- a/ext/node/polyfills/_fs/_fs_open.ts +++ b/ext/node/polyfills/_fs/_fs_open.ts @@ -8,10 +8,10 @@ import { O_WRONLY, } from "ext:deno_node/_fs/_fs_constants.ts"; import { getOpenOptions } from "ext:deno_node/_fs/_fs_common.ts"; -import { promisify } from "ext:deno_node/internal/util.mjs"; import { parseFileMode } from "ext:deno_node/internal/validators.mjs"; import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts"; import { getValidatedPath } from "ext:deno_node/internal/fs/utils.mjs"; +import { FileHandle } from "ext:deno_node/internal/fs/handle.ts"; import type { Buffer } from "ext:deno_node/buffer.ts"; function existsSync(filePath: string | URL): boolean { @@ -139,16 +139,18 @@ export function open( } } -export const openPromise = promisify(open) as ( - & ((path: string | Buffer | URL) => Promise) - & ((path: string | Buffer | URL, flags: openFlags) 
=> Promise) - & ((path: string | Buffer | URL, mode?: number) => Promise) - & (( - path: string | Buffer | URL, - flags?: openFlags, - mode?: number, - ) => Promise) -); +export function openPromise( + path: string | Buffer | URL, + flags?: openFlags = "r", + mode? = 0o666, +): Promise { + return new Promise((resolve, reject) => { + open(path, flags, mode, (err, fd) => { + if (err) reject(err); + else resolve(new FileHandle(fd)); + }); + }); +} export function openSync(path: string | Buffer | URL): number; export function openSync( diff --git a/ext/node/polyfills/_fs/_fs_readFile.ts b/ext/node/polyfills/_fs/_fs_readFile.ts index 0ff8a311ad..b3bd5b94c7 100644 --- a/ext/node/polyfills/_fs/_fs_readFile.ts +++ b/ext/node/polyfills/_fs/_fs_readFile.ts @@ -6,6 +6,8 @@ import { TextOptionsArgument, } from "ext:deno_node/_fs/_fs_common.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; +import { readAll } from "ext:deno_io/12_io.js"; +import { FileHandle } from "ext:deno_node/internal/fs/handle.ts"; import { fromFileUrl } from "ext:deno_node/path.ts"; import { BinaryEncodings, @@ -32,25 +34,26 @@ type TextCallback = (err: Error | null, data?: string) => void; type BinaryCallback = (err: Error | null, data?: Buffer) => void; type GenericCallback = (err: Error | null, data?: string | Buffer) => void; type Callback = TextCallback | BinaryCallback | GenericCallback; +type Path = string | URL | FileHandle; export function readFile( - path: string | URL, + path: Path, options: TextOptionsArgument, callback: TextCallback, ): void; export function readFile( - path: string | URL, + path: Path, options: BinaryOptionsArgument, callback: BinaryCallback, ): void; export function readFile( - path: string | URL, + path: Path, options: null | undefined | FileOptionsArgument, callback: BinaryCallback, ): void; export function readFile(path: string | URL, callback: BinaryCallback): void; export function readFile( - path: string | URL, + path: Path, optOrCallback?: FileOptionsArgument | 
Callback | null | undefined, callback?: Callback, ) { @@ -64,7 +67,13 @@ export function readFile( const encoding = getEncoding(optOrCallback); - const p = Deno.readFile(path); + let p: Promise; + if (path instanceof FileHandle) { + const fsFile = new Deno.FsFile(path.fd); + p = readAll(fsFile); + } else { + p = Deno.readFile(path); + } if (cb) { p.then((data: Uint8Array) => { @@ -79,9 +88,9 @@ export function readFile( } export const readFilePromise = promisify(readFile) as ( - & ((path: string | URL, opt: TextOptionsArgument) => Promise) - & ((path: string | URL, opt?: BinaryOptionsArgument) => Promise) - & ((path: string | URL, opt?: FileOptionsArgument) => Promise) + & ((path: Path, opt: TextOptionsArgument) => Promise) + & ((path: Path, opt?: BinaryOptionsArgument) => Promise) + & ((path: Path, opt?: FileOptionsArgument) => Promise) ); export function readFileSync( diff --git a/ext/node/polyfills/internal/fs/handle.ts b/ext/node/polyfills/internal/fs/handle.ts new file mode 100644 index 0000000000..a369a4a4d7 --- /dev/null +++ b/ext/node/polyfills/internal/fs/handle.ts @@ -0,0 +1,31 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+import { EventEmitter } from "ext:deno_node/events.ts"; +import { Buffer } from "ext:deno_node/buffer.ts"; +import { promises } from "ext:deno_node/fs.ts"; +import { + BinaryOptionsArgument, + FileOptionsArgument, + TextOptionsArgument, +} from "ext:deno_node/_fs/_fs_common.ts"; + +export class FileHandle extends EventEmitter { + #rid: number; + constructor(rid: number) { + super(); + this.rid = rid; + } + + get fd() { + return this.rid; + } + + readFile( + opt?: TextOptionsArgument | BinaryOptionsArgument | FileOptionsArgument, + ): Promise { + return promises.readFile(this, opt); + } +} + +export default { + FileHandle, +}; From f5c1ff08e6c67c38043b4c76b5472ad71c93d697 Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Fri, 2 Jun 2023 17:46:50 +0200 Subject: [PATCH 289/320] fix(node): map stdio [0, 1, 2] to "inherit" (#19352) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Internally, `node-tap` spawns a child process with `stdio: [0, 1, 2]`. Whilst we don't support passing fd numbers as an argument so far, it turns out that `[0, 1, 2]` is equivalent to `"inherit"` which we already support. See: https://nodejs.org/api/child_process.html#optionsstdio Mapping it to `"inherit"` is fine for us and gets us one step closer in getting `node-tap` working. 
I'm now at the stage where the coverage table is already shown
a/ext/node/polyfills/internal/child_process.ts b/ext/node/polyfills/internal/child_process.ts index 365af4add6..d4acf1db2a 100644 --- a/ext/node/polyfills/internal/child_process.ts +++ b/ext/node/polyfills/internal/child_process.ts @@ -469,6 +469,13 @@ function normalizeStdioOption( ...Array, ] { if (Array.isArray(stdio)) { + // `[0, 1, 2]` is equivalent to `"inherit"` + if ( + stdio.length === 3 && stdio[0] === 0 && stdio[1] === 1 && stdio[2] === 2 + ) { + return ["inherit", "inherit", "inherit"]; + } + // At least 3 stdio must be created to match node while (stdio.length < 3) { ArrayPrototypePush(stdio, undefined); From 98320ff1f88e6b6ee1d85d64e99519986f6a7239 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Fri, 2 Jun 2023 17:59:16 +0200 Subject: [PATCH 290/320] perf(ext/http): Use flat list of headers for multiple set/get methods (#19336) This PR attempts to resolve the first item on the list from https://github.com/denoland/deno/issues/19330 which is about using a flat list of interleaved key/value pairs, instead of a nested array of tuples. 
I can tackle some more if you can provide a quick example of using raw v8 arrays, cc @mmastrac --- ext/http/00_serve.js | 9 +++++++-- ext/http/http_next.rs | 28 ++++++++++++---------------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index d84244ee4b..dbdc227056 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -334,7 +334,12 @@ class InnerRequest { if (this.#slabId === undefined) { throw new TypeError("request closed"); } - return op_http_get_request_headers(this.#slabId); + const headers = []; + const reqHeaders = op_http_get_request_headers(this.#slabId); + for (let i = 0; i < reqHeaders.length; i += 2) { + headers.push([reqHeaders[i], reqHeaders[i + 1]]); + } + return headers; } get slabId() { @@ -570,7 +575,7 @@ function mapToCallback(context, callback, onError) { if (headers.length == 1) { op_http_set_response_header(req, headers[0][0], headers[0][1]); } else { - op_http_set_response_headers(req, headers); + op_http_set_response_headers(req, headers.flat()); } } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 7a02757c00..9ec9e12c9d 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -256,12 +256,11 @@ pub fn op_http_get_request_header( } #[op] -pub fn op_http_get_request_headers( - slab_id: SlabId, -) -> Vec<(ByteString, ByteString)> { +pub fn op_http_get_request_headers(slab_id: SlabId) -> Vec { let http = slab_get(slab_id); let headers = &http.request_parts().headers; - let mut vec = Vec::with_capacity(headers.len()); + // Two slots for each header key/value pair + let mut vec = Vec::with_capacity(headers.len() * 2); let mut cookies: Option> = None; for (name, value) in headers { if name == COOKIE { @@ -272,7 +271,8 @@ pub fn op_http_get_request_headers( } } else { let name: &[u8] = name.as_ref(); - vec.push((name.into(), value.as_bytes().into())) + vec.push(name.into()); + vec.push(value.as_bytes().into()); } } @@ -283,11 +283,10 @@ pub fn 
op_http_get_request_headers( // TODO(mmastrac): This should probably happen on the JS side on-demand if let Some(cookies) = cookies { let cookie_sep = "; ".as_bytes(); - vec.push(( - ByteString::from(COOKIE.as_str()), - ByteString::from(cookies.join(cookie_sep)), - )); + vec.push(ByteString::from(COOKIE.as_str())); + vec.push(ByteString::from(cookies.join(cookie_sep))); } + vec } @@ -313,18 +312,15 @@ pub fn op_http_set_response_header(slab_id: SlabId, name: &str, value: &str) { } #[op] -pub fn op_http_set_response_headers( - slab_id: SlabId, - headers: Vec<(ByteString, ByteString)>, -) { +pub fn op_http_set_response_headers(slab_id: SlabId, headers: Vec) { let mut http = slab_get(slab_id); // TODO(mmastrac): Invalid headers should be handled? let resp_headers = http.response().headers_mut(); resp_headers.reserve(headers.len()); - for (name, value) in headers { + for header in headers.chunks_exact(2) { // These are valid latin-1 strings - let name = HeaderName::from_bytes(&name).unwrap(); - let value = HeaderValue::from_bytes(&value).unwrap(); + let name = HeaderName::from_bytes(&header[0]).unwrap(); + let value = HeaderValue::from_bytes(&header[1]).unwrap(); resp_headers.append(name, value); } } From ce5bf9fb2a52fa337afb5f54ec2553eb4d411fd2 Mon Sep 17 00:00:00 2001 From: Igor Zinkovsky Date: Fri, 2 Jun 2023 11:12:26 -0700 Subject: [PATCH 291/320] fix(kv) run sqlite transactions via spawn_blocking (#19350) `rusqlite` does not support async operations; with this PR SQLite operations will run through `spawn_blocking` to ensure that the event loop does not get blocked. There is still only a single SQLite connection. So all operations will do an async wait on the connection. In the future we can add a connection pool if needed. 
--- ext/kv/sqlite.rs | 319 ++++++++++++++++++++++++++++------------------- 1 file changed, 192 insertions(+), 127 deletions(-) diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index 63be1281b4..80d230ab15 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use std::borrow::Cow; +use std::cell::Cell; use std::cell::RefCell; use std::marker::PhantomData; use std::path::Path; @@ -10,6 +11,8 @@ use std::rc::Rc; use async_trait::async_trait; use deno_core::error::type_error; use deno_core::error::AnyError; +use deno_core::task::spawn_blocking; +use deno_core::AsyncRefCell; use deno_core::OpState; use rusqlite::params; use rusqlite::OpenFlags; @@ -112,11 +115,9 @@ impl DatabaseHandler for SqliteDbHandler

    { state: Rc>, path: Option, ) -> Result { - let conn = match (path.as_deref(), &self.default_storage_dir) { - (Some(":memory:"), _) | (None, None) => { - rusqlite::Connection::open_in_memory()? - } - (Some(path), _) => { + // Validate path + if let Some(path) = &path { + if path != ":memory:" { if path.is_empty() { return Err(type_error("Filename cannot be empty")); } @@ -132,44 +133,92 @@ impl DatabaseHandler for SqliteDbHandler

    { permissions.check_read(path, "Deno.openKv")?; permissions.check_write(path, "Deno.openKv")?; } - let flags = OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); - rusqlite::Connection::open_with_flags(path, flags)? - } - (None, Some(path)) => { - std::fs::create_dir_all(path)?; - let path = path.join("kv.sqlite3"); - rusqlite::Connection::open(&path)? - } - }; - - conn.pragma_update(None, "journal_mode", "wal")?; - conn.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; - - let current_version: usize = conn - .query_row( - "select version from migration_state where k = 0", - [], - |row| row.get(0), - ) - .optional()? - .unwrap_or(0); - - for (i, migration) in MIGRATIONS.iter().enumerate() { - let version = i + 1; - if version > current_version { - conn.execute_batch(migration)?; - conn.execute( - "replace into migration_state (k, version) values(?, ?)", - [&0, &version], - )?; } } - Ok(SqliteDb(RefCell::new(conn))) + let default_storage_dir = self.default_storage_dir.clone(); + let conn = spawn_blocking(move || { + let conn = match (path.as_deref(), &default_storage_dir) { + (Some(":memory:"), _) | (None, None) => { + rusqlite::Connection::open_in_memory()? + } + (Some(path), _) => { + let flags = + OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); + rusqlite::Connection::open_with_flags(path, flags)? + } + (None, Some(path)) => { + std::fs::create_dir_all(path)?; + let path = path.join("kv.sqlite3"); + rusqlite::Connection::open(&path)? + } + }; + + conn.pragma_update(None, "journal_mode", "wal")?; + conn.execute(STATEMENT_CREATE_MIGRATION_TABLE, [])?; + + let current_version: usize = conn + .query_row( + "select version from migration_state where k = 0", + [], + |row| row.get(0), + ) + .optional()? 
+ .unwrap_or(0); + + for (i, migration) in MIGRATIONS.iter().enumerate() { + let version = i + 1; + if version > current_version { + conn.execute_batch(migration)?; + conn.execute( + "replace into migration_state (k, version) values(?, ?)", + [&0, &version], + )?; + } + } + + Ok::<_, AnyError>(conn) + }) + .await + .unwrap()?; + + Ok(SqliteDb(Rc::new(AsyncRefCell::new(Cell::new(Some(conn)))))) } } -pub struct SqliteDb(RefCell); +pub struct SqliteDb(Rc>>>); + +impl SqliteDb { + async fn run_tx(&self, f: F) -> Result + where + F: (FnOnce(rusqlite::Transaction<'_>) -> Result) + + Send + + 'static, + R: Send + 'static, + { + // Transactions need exclusive access to the connection. Wait until + // we can borrow_mut the connection. + let cell = self.0.borrow_mut().await; + + // Take the db out of the cell and run the transaction via spawn_blocking. + let mut db = cell.take().unwrap(); + let (result, db) = spawn_blocking(move || { + let result = { + match db.transaction() { + Ok(tx) => f(tx), + Err(e) => Err(e.into()), + } + }; + (result, db) + }) + .await + .unwrap(); + + // Put the db back into the cell. 
+ cell.set(Some(db)); + result + } +} #[async_trait(?Send)] impl Database for SqliteDb { @@ -178,110 +227,126 @@ impl Database for SqliteDb { requests: Vec, _options: SnapshotReadOptions, ) -> Result, AnyError> { - let mut responses = Vec::with_capacity(requests.len()); - let mut db = self.0.borrow_mut(); - let tx = db.transaction()?; + self + .run_tx(move |tx| { + let mut responses = Vec::with_capacity(requests.len()); + for request in requests { + let mut stmt = tx.prepare_cached(if request.reverse { + STATEMENT_KV_RANGE_SCAN_REVERSE + } else { + STATEMENT_KV_RANGE_SCAN + })?; + let entries = stmt + .query_map( + ( + request.start.as_slice(), + request.end.as_slice(), + request.limit.get(), + ), + |row| { + let key: Vec = row.get(0)?; + let value: Vec = row.get(1)?; + let encoding: i64 = row.get(2)?; - for request in requests { - let mut stmt = tx.prepare_cached(if request.reverse { - STATEMENT_KV_RANGE_SCAN_REVERSE - } else { - STATEMENT_KV_RANGE_SCAN - })?; - let entries = stmt - .query_map( - ( - request.start.as_slice(), - request.end.as_slice(), - request.limit.get(), - ), - |row| { - let key: Vec = row.get(0)?; - let value: Vec = row.get(1)?; - let encoding: i64 = row.get(2)?; + let value = decode_value(value, encoding); - let value = decode_value(value, encoding); + let version: i64 = row.get(3)?; + Ok(KvEntry { + key, + value, + versionstamp: version_to_versionstamp(version), + }) + }, + )? + .collect::, rusqlite::Error>>()?; + responses.push(ReadRangeOutput { entries }); + } - let version: i64 = row.get(3)?; - Ok(KvEntry { - key, - value, - versionstamp: version_to_versionstamp(version), - }) - }, - )? 
- .collect::, rusqlite::Error>>()?; - responses.push(ReadRangeOutput { entries }); - } - - Ok(responses) + Ok(responses) + }) + .await } async fn atomic_write( &self, write: AtomicWrite, ) -> Result, AnyError> { - let mut db = self.0.borrow_mut(); - - let tx = db.transaction()?; - - for check in write.checks { - let real_versionstamp = tx - .prepare_cached(STATEMENT_KV_POINT_GET_VERSION_ONLY)? - .query_row([check.key.as_slice()], |row| row.get(0)) - .optional()? - .map(version_to_versionstamp); - if real_versionstamp != check.versionstamp { - return Ok(None); - } - } - - let version: i64 = tx - .prepare_cached(STATEMENT_INC_AND_GET_DATA_VERSION)? - .query_row([], |row| row.get(0))?; - - for mutation in write.mutations { - match mutation.kind { - MutationKind::Set(value) => { - let (value, encoding) = encode_value(&value); - let changed = tx - .prepare_cached(STATEMENT_KV_POINT_SET)? - .execute(params![mutation.key, &value, &encoding, &version])?; - assert_eq!(changed, 1) + self + .run_tx(move |tx| { + for check in write.checks { + let real_versionstamp = tx + .prepare_cached(STATEMENT_KV_POINT_GET_VERSION_ONLY)? + .query_row([check.key.as_slice()], |row| row.get(0)) + .optional()? + .map(version_to_versionstamp); + if real_versionstamp != check.versionstamp { + return Ok(None); + } } - MutationKind::Delete => { - let changed = tx - .prepare_cached(STATEMENT_KV_POINT_DELETE)? - .execute(params![mutation.key])?; - assert!(changed == 0 || changed == 1) - } - MutationKind::Sum(operand) => { - mutate_le64(&tx, &mutation.key, "sum", &operand, version, |a, b| { - a.wrapping_add(b) - })?; - } - MutationKind::Min(operand) => { - mutate_le64(&tx, &mutation.key, "min", &operand, version, |a, b| { - a.min(b) - })?; - } - MutationKind::Max(operand) => { - mutate_le64(&tx, &mutation.key, "max", &operand, version, |a, b| { - a.max(b) - })?; - } - } - } - // TODO(@losfair): enqueues + let version: i64 = tx + .prepare_cached(STATEMENT_INC_AND_GET_DATA_VERSION)? 
+ .query_row([], |row| row.get(0))?; - tx.commit()?; + for mutation in write.mutations { + match mutation.kind { + MutationKind::Set(value) => { + let (value, encoding) = encode_value(&value); + let changed = tx + .prepare_cached(STATEMENT_KV_POINT_SET)? + .execute(params![mutation.key, &value, &encoding, &version])?; + assert_eq!(changed, 1) + } + MutationKind::Delete => { + let changed = tx + .prepare_cached(STATEMENT_KV_POINT_DELETE)? + .execute(params![mutation.key])?; + assert!(changed == 0 || changed == 1) + } + MutationKind::Sum(operand) => { + mutate_le64( + &tx, + &mutation.key, + "sum", + &operand, + version, + |a, b| a.wrapping_add(b), + )?; + } + MutationKind::Min(operand) => { + mutate_le64( + &tx, + &mutation.key, + "min", + &operand, + version, + |a, b| a.min(b), + )?; + } + MutationKind::Max(operand) => { + mutate_le64( + &tx, + &mutation.key, + "max", + &operand, + version, + |a, b| a.max(b), + )?; + } + } + } - let new_vesionstamp = version_to_versionstamp(version); + // TODO(@losfair): enqueues - Ok(Some(CommitResult { - versionstamp: new_vesionstamp, - })) + tx.commit()?; + + let new_vesionstamp = version_to_versionstamp(version); + + Ok(Some(CommitResult { + versionstamp: new_vesionstamp, + })) + }) + .await } } From 260d2ec3a1d6c49892fd339b6a8171596a72b8ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Sat, 3 Jun 2023 00:31:27 +0200 Subject: [PATCH 292/320] perf(ext/http): Migrate op_http_get_request_headers to v8::Array (#19354) --- Cargo.lock | 1 + ext/http/Cargo.toml | 1 + ext/http/http_next.rs | 58 +++++++++++++++++++++++++++++++++++-------- 3 files changed, 50 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07ca3b10fc..4ded396c9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1065,6 +1065,7 @@ dependencies = [ "ring", "serde", "slab", + "smallvec", "thiserror", "tokio", "tokio-util", diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 605615d7ed..9e7d39378c 100644 --- 
a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -46,6 +46,7 @@ pin-project.workspace = true ring.workspace = true serde.workspace = true slab.workspace = true +smallvec.workspace = true thiserror.workspace = true tokio.workspace = true tokio-util = { workspace = true, features = ["io"] } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 9ec9e12c9d..289c387053 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -20,8 +20,10 @@ use cache_control::CacheControl; use deno_core::error::AnyError; use deno_core::futures::TryFutureExt; use deno_core::op; +use deno_core::serde_v8; use deno_core::task::spawn; use deno_core::task::JoinHandle; +use deno_core::v8; use deno_core::AsyncRefCell; use deno_core::AsyncResult; use deno_core::ByteString; @@ -51,11 +53,11 @@ use hyper1::server::conn::http1; use hyper1::server::conn::http2; use hyper1::service::service_fn; use hyper1::service::HttpService; - use hyper1::StatusCode; use once_cell::sync::Lazy; use pin_project::pin_project; use pin_project::pinned_drop; +use smallvec::SmallVec; use std::borrow::Cow; use std::cell::RefCell; use std::future::Future; @@ -255,12 +257,17 @@ pub fn op_http_get_request_header( value.map(|value| value.as_bytes().into()) } -#[op] -pub fn op_http_get_request_headers(slab_id: SlabId) -> Vec { +#[op(v8)] +pub fn op_http_get_request_headers<'scope>( + scope: &mut v8::HandleScope<'scope>, + slab_id: SlabId, +) -> serde_v8::Value<'scope> { let http = slab_get(slab_id); let headers = &http.request_parts().headers; // Two slots for each header key/value pair - let mut vec = Vec::with_capacity(headers.len() * 2); + let mut vec: SmallVec<[v8::Local; 32]> = + SmallVec::with_capacity(headers.len() * 2); + let mut cookies: Option> = None; for (name, value) in headers { if name == COOKIE { @@ -270,9 +277,24 @@ pub fn op_http_get_request_headers(slab_id: SlabId) -> Vec { cookies = Some(vec![value.as_bytes()]); } } else { - let name: &[u8] = name.as_ref(); - vec.push(name.into()); - 
vec.push(value.as_bytes().into()); + vec.push( + v8::String::new_from_one_byte( + scope, + name.as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); + vec.push( + v8::String::new_from_one_byte( + scope, + value.as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); } } @@ -283,11 +305,27 @@ pub fn op_http_get_request_headers(slab_id: SlabId) -> Vec { // TODO(mmastrac): This should probably happen on the JS side on-demand if let Some(cookies) = cookies { let cookie_sep = "; ".as_bytes(); - vec.push(ByteString::from(COOKIE.as_str())); - vec.push(ByteString::from(cookies.join(cookie_sep))); + + vec.push( + v8::String::new_external_onebyte_static(scope, COOKIE.as_ref()) + .unwrap() + .into(), + ); + vec.push( + v8::String::new_from_one_byte( + scope, + cookies.join(cookie_sep).as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + ); } - vec + let array = v8::Array::new_with_elements(scope, vec.as_slice()); + let array_value: v8::Local = array.into(); + + array_value.into() } #[op(fast)] From 7d0853d15863b2fb61bcf5927139cfdd3d869d73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Sat, 3 Jun 2023 20:15:53 +0200 Subject: [PATCH 293/320] perf(ext/http): Migrate op_http_get_request_method_and_url to v8::Array (#19355) Tackles 3rd item from https://github.com/denoland/deno/issues/19330 list. 
Before: 113.9k After: 114.3k --- ext/http/http_next.rs | 55 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 45 insertions(+), 10 deletions(-) diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 289c387053..14b5457e5d 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -215,10 +215,11 @@ pub fn op_http_set_promise_complete(slab_id: SlabId, status: u16) { http.complete(); } -#[op] -pub fn op_http_get_request_method_and_url( +#[op(v8)] +pub fn op_http_get_request_method_and_url<'scope, HTTP>( + scope: &mut v8::HandleScope<'scope>, slab_id: SlabId, -) -> (String, Option, String, String, Option) +) -> serde_v8::Value<'scope> where HTTP: HttpPropertyExtractor, { @@ -231,20 +232,54 @@ where &request_parts.headers, ); + let method: v8::Local = v8::String::new_from_utf8( + scope, + request_parts.method.as_str().as_bytes(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(); + + let authority: v8::Local = match request_properties.authority { + Some(authority) => v8::String::new_from_utf8( + scope, + authority.as_ref(), + v8::NewStringType::Normal, + ) + .unwrap() + .into(), + None => v8::undefined(scope).into(), + }; + // Only extract the path part - we handle authority elsewhere let path = match &request_parts.uri.path_and_query() { Some(path_and_query) => path_and_query.to_string(), None => "".to_owned(), }; - // TODO(mmastrac): Passing method can be optimized - ( - request_parts.method.as_str().to_owned(), - request_properties.authority, - path, - String::from(request_info.peer_address.as_ref()), - request_info.peer_port, + let path: v8::Local = + v8::String::new_from_utf8(scope, path.as_ref(), v8::NewStringType::Normal) + .unwrap() + .into(); + + let peer_address: v8::Local = v8::String::new_from_utf8( + scope, + request_info.peer_address.as_bytes(), + v8::NewStringType::Normal, ) + .unwrap() + .into(); + + let port: v8::Local = match request_info.peer_port { + Some(port) => v8::Integer::new(scope, port.into()).into(), + None => 
v8::undefined(scope).into(), + }; + + let vec = [method, authority, path, peer_address, port]; + let array = v8::Array::new_with_elements(scope, vec.as_slice()); + let array_value: v8::Local = array.into(); + + array_value.into() } #[op] From 34dac6c6efa75f38c29031a65db1ee3332a67259 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Sat, 3 Jun 2023 21:22:32 +0100 Subject: [PATCH 294/320] refactor(core): remove force_op_registration and cleanup JsRuntimeForSnapshot (#19353) Addresses https://github.com/denoland/deno/pull/19308#discussion_r1212248194. Removes force_op_registration as it is no longer necessary. --- cli/lsp/tsc.rs | 3 - cli/ops/bench.rs | 3 - cli/ops/mod.rs | 3 - cli/ops/testing.rs | 3 - cli/tsc/mod.rs | 11 +- core/bindings.rs | 23 +- core/extensions.rs | 15 - core/runtime.rs | 390 +++++++----------- core/snapshot_util.rs | 38 +- ops/lib.rs | 2 - ops/optimizer_tests/async_nop.out | 1 - ops/optimizer_tests/async_result.out | 1 - ops/optimizer_tests/callback_options.out | 1 - ops/optimizer_tests/cow_str.out | 1 - ops/optimizer_tests/f64_slice.out | 1 - ops/optimizer_tests/incompatible_1.out | 1 - ops/optimizer_tests/issue16934.out | 1 - ops/optimizer_tests/issue16934_fast.out | 1 - .../op_blob_revoke_object_url.out | 1 - ops/optimizer_tests/op_ffi_ptr_value.out | 1 - ops/optimizer_tests/op_print.out | 1 - ops/optimizer_tests/op_state.out | 1 - ops/optimizer_tests/op_state_basic1.out | 1 - ops/optimizer_tests/op_state_generics.out | 1 - ops/optimizer_tests/op_state_result.out | 1 - ops/optimizer_tests/op_state_warning.out | 1 - .../op_state_with_transforms.out | 1 - ops/optimizer_tests/opstate_with_arity.out | 1 - ops/optimizer_tests/option_arg.out | 1 - ops/optimizer_tests/owned_string.out | 1 - .../param_mut_binding_warning.out | 1 - ops/optimizer_tests/raw_ptr.out | 1 - ops/optimizer_tests/serde_v8_value.out | 1 - ops/optimizer_tests/strings.out | 1 - ops/optimizer_tests/strings_result.out | 1 - ops/optimizer_tests/u64_result.out | 1 - 
ops/optimizer_tests/uint8array.out | 1 - ops/optimizer_tests/unit_result.out | 1 - ops/optimizer_tests/unit_result2.out | 1 - ops/optimizer_tests/unit_ret.out | 1 - ops/optimizer_tests/wasm_op.out | 1 - runtime/examples/extension_with_ops/main.rs | 8 +- runtime/ops/fs_events.rs | 3 - runtime/ops/http.rs | 3 - runtime/ops/os/mod.rs | 6 - runtime/ops/permissions.rs | 3 - runtime/ops/process.rs | 3 - runtime/ops/runtime.rs | 3 - runtime/ops/signal.rs | 3 - runtime/ops/tty.rs | 3 - runtime/ops/web_worker.rs | 3 - runtime/ops/worker_host.rs | 3 - 52 files changed, 161 insertions(+), 402 deletions(-) diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index bfbb5cf9ac..0e52f8d873 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -3261,9 +3261,6 @@ deno_core::extension!(deno_tsc, options.performance, )); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); /// Instruct a language server runtime to start the language server and provide diff --git a/cli/ops/bench.rs b/cli/ops/bench.rs index da0f3d959a..f569a8cbb4 100644 --- a/cli/ops/bench.rs +++ b/cli/ops/bench.rs @@ -42,9 +42,6 @@ deno_core::extension!(deno_bench, state.put(options.sender); state.put(BenchContainer::default()); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[derive(Clone)] diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs index d39f19270a..5066c44b9f 100644 --- a/cli/ops/mod.rs +++ b/cli/ops/mod.rs @@ -23,9 +23,6 @@ deno_core::extension!(deno_cli, state = |state, options| { state.put(options.npm_resolver); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/cli/ops/testing.rs b/cli/ops/testing.rs index 3f9ade7c9e..b4d9b451a0 100644 --- a/cli/ops/testing.rs +++ b/cli/ops/testing.rs @@ -43,9 +43,6 @@ deno_core::extension!(deno_test, state.put(options.sender); state.put(TestContainer::default()); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { 
- ext.force_op_registration(); - }, ); #[derive(Clone)] diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 2b8a210ab0..d9f9b8b531 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -117,13 +117,7 @@ pub fn get_types_declaration_file_text(unstable: bool) -> String { } fn get_asset_texts_from_new_runtime() -> Result, AnyError> { - deno_core::extension!( - deno_cli_tsc, - ops_fn = deno_ops, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, - ); + deno_core::extension!(deno_cli_tsc, ops_fn = deno_ops); // the assets are stored within the typescript isolate, so take them out of there let mut runtime = JsRuntime::new(RuntimeOptions { @@ -780,9 +774,6 @@ pub fn exec(request: Request) -> Result { .unwrap(), )); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); let startup_source = ascii_str!("globalThis.startup({ legacyFlag: false })"); diff --git a/core/bindings.rs b/core/bindings.rs index d91e4c309d..2be9b35b65 100644 --- a/core/bindings.rs +++ b/core/bindings.rs @@ -15,19 +15,10 @@ use crate::modules::ImportAssertionsKind; use crate::modules::ModuleMap; use crate::modules::ResolutionKind; use crate::ops::OpCtx; +use crate::runtime::InitMode; use crate::JsRealm; use crate::JsRuntime; -#[derive(Copy, Clone, Eq, PartialEq)] -pub(crate) enum BindingsMode { - /// We have no snapshot -- this is a pristine context. - New, - /// We have initialized before, are reloading a snapshot, and will snapshot. - Loaded, - /// We have initialized before, are reloading a snapshot, and will not snapshot again. - LoadedFinal, -} - pub(crate) fn external_references(ops: &[OpCtx]) -> v8::ExternalReferences { // Overallocate a bit, it's better than having to resize the vector. 
let mut references = Vec::with_capacity(4 + ops.len() * 4); @@ -127,7 +118,7 @@ pub(crate) fn initialize_context<'s>( scope: &mut v8::HandleScope<'s>, context: v8::Local<'s, v8::Context>, op_ctxs: &[OpCtx], - bindings_mode: BindingsMode, + init_mode: InitMode, ) -> v8::Local<'s, v8::Context> { let global = context.global(scope); @@ -137,17 +128,11 @@ pub(crate) fn initialize_context<'s>( codegen, "Deno.__op__ = function(opFns, callConsole, console) {{" ); - if bindings_mode == BindingsMode::New { + if init_mode == InitMode::New { _ = writeln!(codegen, "Deno.__op__console(callConsole, console);"); } for op_ctx in op_ctxs { if op_ctx.decl.enabled { - // If we're loading from a snapshot, we can skip registration for most ops - if bindings_mode == BindingsMode::LoadedFinal - && !op_ctx.decl.force_registration - { - continue; - } _ = writeln!( codegen, "Deno.__op__registerOp({}, opFns[{}], \"{}\");", @@ -182,7 +167,7 @@ pub(crate) fn initialize_context<'s>( let op_fn = op_ctx_function(scope, op_ctx); op_fns.set_index(scope, op_ctx.id as u32, op_fn.into()); } - if bindings_mode != BindingsMode::New { + if init_mode == InitMode::FromSnapshot { op_fn.call(scope, recv.into(), &[op_fns.into()]); } else { // Bind functions to Deno.core.* diff --git a/core/extensions.rs b/core/extensions.rs index ff86fec648..fa6d7851e7 100644 --- a/core/extensions.rs +++ b/core/extensions.rs @@ -72,7 +72,6 @@ pub struct OpDecl { pub is_async: bool, pub is_unstable: bool, pub is_v8: bool, - pub force_registration: bool, pub arg_count: u8, pub fast_fn: Option, } @@ -360,7 +359,6 @@ pub struct Extension { initialized: bool, enabled: bool, deps: Option<&'static [&'static str]>, - force_op_registration: bool, pub(crate) is_core: bool, } @@ -431,7 +429,6 @@ impl Extension { let mut ops = self.ops.take()?; for op in ops.iter_mut() { op.enabled = self.enabled && op.enabled; - op.force_registration = self.force_op_registration; } Some(ops) } @@ -485,7 +482,6 @@ pub struct ExtensionBuilder { 
event_loop_middleware: Option>, name: &'static str, deps: &'static [&'static str], - force_op_registration: bool, is_core: bool, } @@ -534,15 +530,6 @@ impl ExtensionBuilder { self } - /// Mark that ops from this extension should be added to `Deno.core.ops` - /// unconditionally. This is useful is some ops are not available - /// during snapshotting, as ops are not registered by default when a - /// `JsRuntime` is created with an existing snapshot. - pub fn force_op_registration(&mut self) -> &mut Self { - self.force_op_registration = true; - self - } - /// Consume the [`ExtensionBuilder`] and return an [`Extension`]. pub fn take(self) -> Extension { let js_files = Some(self.js); @@ -560,7 +547,6 @@ impl ExtensionBuilder { initialized: false, enabled: true, name: self.name, - force_op_registration: self.force_op_registration, deps, is_core: self.is_core, } @@ -583,7 +569,6 @@ impl ExtensionBuilder { enabled: true, name: self.name, deps, - force_op_registration: self.force_op_registration, is_core: self.is_core, } } diff --git a/core/runtime.rs b/core/runtime.rs index f95c4a8ef5..fdcb81e9e2 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -1,7 +1,6 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
use crate::bindings; -use crate::bindings::BindingsMode; use crate::error::generic_error; use crate::error::to_v8_type_error; use crate::error::JsError; @@ -24,8 +23,6 @@ use crate::realm::ContextState; use crate::realm::JsRealm; use crate::realm::JsRealmInner; use crate::snapshot_util; -use crate::snapshot_util::SnapshotOptions; -use crate::snapshot_util::SnapshottedData; use crate::source_map::SourceMapCache; use crate::source_map::SourceMapGetter; use crate::Extension; @@ -63,7 +60,6 @@ use std::sync::Mutex; use std::sync::Once; use std::task::Context; use std::task::Poll; -use v8::CreateParams; const STATE_DATA_OFFSET: u32 = 0; const MODULE_MAP_DATA_OFFSET: u32 = 1; @@ -121,7 +117,7 @@ impl DerefMut for ManuallyDropRc { /// This inner struct allows us to let the outer JsRuntime drop normally without a Drop impl, while we /// control dropping more closely here using ManuallyDrop. struct InnerIsolateState { - snapshotting: bool, + will_snapshot: bool, state: ManuallyDropRc>, v8_isolate: ManuallyDrop, } @@ -182,7 +178,7 @@ impl Drop for InnerIsolateState { // SAFETY: We gotta drop these unsafe { ManuallyDrop::drop(&mut self.state.0); - if self.snapshotting { + if self.will_snapshot { // Create the snapshot and just drop it. eprintln!("WARNING: v8::OwnedIsolate for snapshot was leaked"); } else { @@ -192,47 +188,46 @@ impl Drop for InnerIsolateState { } } +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub(crate) enum InitMode { + /// We have no snapshot -- this is a pristine context. + New, + /// We are using a snapshot, thus certain initialization steps are skipped. + FromSnapshot, +} + +impl InitMode { + fn from_options(options: &RuntimeOptions) -> Self { + match options.startup_snapshot { + None => Self::New, + Some(_) => Self::FromSnapshot, + } + } +} + /// A single execution context of JavaScript. Corresponds roughly to the "Web /// Worker" concept in the DOM. 
//// -/// The JsRuntimeImpl future completes when there is an error or when all +/// The JsRuntime future completes when there is an error or when all /// pending ops have completed. /// -/// API consumers will want to use either the [`JsRuntime`] or [`JsRuntimeForSnapshot`] -/// type aliases. -pub struct JsRuntimeImpl { +/// Use [`JsRuntimeForSnapshot`] to be able to create a snapshot. +pub struct JsRuntime { inner: InnerIsolateState, module_map: Rc>, allocations: IsolateAllocations, extensions: Vec, event_loop_middlewares: Vec>, - bindings_mode: BindingsMode, + init_mode: InitMode, // Marks if this is considered the top-level runtime. Used only be inspector. is_main: bool, } -/// The runtime type that most users will use when not creating a snapshot. -pub struct JsRuntime(JsRuntimeImpl); - -impl Deref for JsRuntime { - type Target = JsRuntimeImpl; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for JsRuntime { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - /// The runtime type used for snapshot creation. -pub struct JsRuntimeForSnapshot(JsRuntimeImpl); +pub struct JsRuntimeForSnapshot(JsRuntime); impl Deref for JsRuntimeForSnapshot { - type Target = JsRuntimeImpl; + type Target = JsRuntime; fn deref(&self) -> &Self::Target { &self.0 @@ -301,7 +296,7 @@ pub type SharedArrayBufferStore = pub type CompiledWasmModuleStore = CrossIsolateStore; -/// Internal state for JsRuntimeImpl which is stored in one of v8::Isolate's +/// Internal state for JsRuntime which is stored in one of v8::Isolate's /// embedder slots. pub struct JsRuntimeState { global_realm: Option, @@ -398,7 +393,7 @@ pub struct RuntimeOptions { /// executed tries to load modules. pub module_loader: Option>, - /// JsRuntimeImpl extensions, not to be confused with ES modules. + /// JsRuntime extensions, not to be confused with ES modules. /// Only ops registered by extensions will be initialized. 
If you need /// to execute JS code from extensions, pass source files in `js` or `esm` /// option on `ExtensionBuilder`. @@ -448,21 +443,60 @@ pub struct RuntimeSnapshotOptions { pub snapshot_module_load_cb: Option, } -trait JsRuntimeInternalTrait { - fn create_raw_isolate( - refs: &'static v8::ExternalReferences, - params: Option, - snapshot: SnapshotOptions, - ) -> v8::OwnedIsolate; -} +impl JsRuntime { + /// Only constructor, configuration is done through `options`. + pub fn new(mut options: RuntimeOptions) -> JsRuntime { + JsRuntime::init_v8(options.v8_platform.take(), cfg!(test)); + JsRuntime::new_inner(options, false, None) + } -impl JsRuntimeImpl { - fn init_v8(v8_platform: Option>) { + pub(crate) fn state_from( + isolate: &v8::Isolate, + ) -> Rc> { + let state_ptr = isolate.get_data(STATE_DATA_OFFSET); + let state_rc = + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. + unsafe { Rc::from_raw(state_ptr as *const RefCell) }; + let state = state_rc.clone(); + std::mem::forget(state_rc); + state + } + + pub(crate) fn module_map_from( + isolate: &v8::Isolate, + ) -> Rc> { + let module_map_ptr = isolate.get_data(MODULE_MAP_DATA_OFFSET); + let module_map_rc = + // SAFETY: We are sure that it's a valid pointer for whole lifetime of + // the runtime. 
+ unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; + let module_map = module_map_rc.clone(); + std::mem::forget(module_map_rc); + module_map + } + + pub(crate) fn event_loop_pending_state_from_scope( + scope: &mut v8::HandleScope, + ) -> EventLoopPendingState { + let state = JsRuntime::state_from(scope); + let module_map = JsRuntime::module_map_from(scope); + let state = EventLoopPendingState::new( + scope, + &mut state.borrow_mut(), + &module_map.borrow(), + ); + state + } + + fn init_v8( + v8_platform: Option>, + predictable: bool, + ) { static DENO_INIT: Once = Once::new(); static DENO_PREDICTABLE: AtomicBool = AtomicBool::new(false); static DENO_PREDICTABLE_SET: AtomicBool = AtomicBool::new(false); - let predictable = FOR_SNAPSHOT || cfg!(test); if DENO_PREDICTABLE_SET.load(Ordering::SeqCst) { let current = DENO_PREDICTABLE.load(Ordering::SeqCst); assert_eq!(current, predictable, "V8 may only be initialized once in either snapshotting or non-snapshotting mode. Either snapshotting or non-snapshotting mode may be used in a single process, not both."); @@ -473,15 +507,13 @@ impl JsRuntimeImpl { DENO_INIT.call_once(move || v8_init(v8_platform, predictable)); } - fn new_runtime( + fn new_inner( mut options: RuntimeOptions, - snapshot_options: SnapshotOptions, + will_snapshot: bool, maybe_load_callback: Option, - ) -> JsRuntimeImpl - where - JsRuntimeImpl: JsRuntimeInternalTrait, - { - let (op_state, ops) = Self::create_opstate(&mut options, &snapshot_options); + ) -> JsRuntime { + let init_mode = InitMode::from_options(&options); + let (op_state, ops) = Self::create_opstate(&mut options, init_mode); let op_state = Rc::new(RefCell::new(op_state)); // Collect event-loop middleware @@ -546,19 +578,56 @@ impl JsRuntimeImpl { // V8 takes ownership of external_references. 
let refs: &'static v8::ExternalReferences = Box::leak(Box::new(refs)); - let bindings_mode = match snapshot_options { - SnapshotOptions::None => bindings::BindingsMode::New, - SnapshotOptions::Create => bindings::BindingsMode::New, - SnapshotOptions::Load(_) => bindings::BindingsMode::LoadedFinal, - SnapshotOptions::CreateFromExisting(_) => bindings::BindingsMode::Loaded, + let mut isolate = if will_snapshot { + snapshot_util::create_snapshot_creator( + refs, + options.startup_snapshot.take(), + ) + } else { + let mut params = options + .create_params + .take() + .unwrap_or_default() + .embedder_wrapper_type_info_offsets( + V8_WRAPPER_TYPE_INDEX, + V8_WRAPPER_OBJECT_INDEX, + ) + .external_references(&**refs); + if let Some(snapshot) = options.startup_snapshot.take() { + params = match snapshot { + Snapshot::Static(data) => params.snapshot_blob(data), + Snapshot::JustCreated(data) => params.snapshot_blob(data), + Snapshot::Boxed(data) => params.snapshot_blob(data), + }; + } + v8::Isolate::new(params) }; - let snapshotting = snapshot_options.will_snapshot(); - - let (mut isolate, global_context, snapshotted_data) = Self::create_isolate( - refs, - options.create_params.take(), - snapshot_options, + isolate.set_capture_stack_trace_for_uncaught_exceptions(true, 10); + isolate.set_promise_reject_callback(bindings::promise_reject_callback); + isolate.set_host_initialize_import_meta_object_callback( + bindings::host_initialize_import_meta_object_callback, ); + isolate.set_host_import_module_dynamically_callback( + bindings::host_import_module_dynamically_callback, + ); + isolate.set_wasm_async_resolve_promise_callback( + bindings::wasm_async_resolve_promise_callback, + ); + + let (global_context, snapshotted_data) = { + let scope = &mut v8::HandleScope::new(&mut isolate); + let context = v8::Context::new(scope); + + // Get module map data from the snapshot + let snapshotted_data = if init_mode == InitMode::FromSnapshot { + Some(snapshot_util::get_snapshotted_data(scope, 
context)) + } else { + None + }; + + (v8::Global::new(scope, context), snapshotted_data) + }; + // SAFETY: this is first use of `isolate_ptr` so we are sure we're // not overwriting an existing pointer. isolate = unsafe { @@ -575,7 +644,7 @@ impl JsRuntimeImpl { scope, context, &context_state.borrow().op_ctxs, - bindings_mode, + init_mode, ); context.set_slot(scope, context_state.clone()); @@ -619,13 +688,13 @@ impl JsRuntimeImpl { drop(context_scope); - let mut js_runtime = JsRuntimeImpl { + let mut js_runtime = JsRuntime { inner: InnerIsolateState { - snapshotting, + will_snapshot, state: ManuallyDropRc(ManuallyDrop::new(state_rc)), v8_isolate: ManuallyDrop::new(isolate), }, - bindings_mode, + init_mode, allocations: IsolateAllocations::default(), event_loop_middlewares, extensions: options.extensions, @@ -641,51 +710,6 @@ impl JsRuntimeImpl { js_runtime } - /// Create a new [`v8::OwnedIsolate`] and its global [`v8::Context`] from optional parameters and snapshot. - fn create_isolate( - refs: &'static v8::ExternalReferences, - params: Option, - snapshot: SnapshotOptions, - ) -> ( - v8::OwnedIsolate, - v8::Global, - Option, - ) - where - JsRuntimeImpl: JsRuntimeInternalTrait, - { - let has_snapshot = snapshot.loaded(); - let mut isolate = Self::create_raw_isolate(refs, params, snapshot); - - isolate.set_capture_stack_trace_for_uncaught_exceptions(true, 10); - isolate.set_promise_reject_callback(bindings::promise_reject_callback); - isolate.set_host_initialize_import_meta_object_callback( - bindings::host_initialize_import_meta_object_callback, - ); - isolate.set_host_import_module_dynamically_callback( - bindings::host_import_module_dynamically_callback, - ); - isolate.set_wasm_async_resolve_promise_callback( - bindings::wasm_async_resolve_promise_callback, - ); - - let (context, snapshotted_data) = { - let scope = &mut v8::HandleScope::new(&mut isolate); - let context = v8::Context::new(scope); - - // Get module map data from the snapshot - let snapshotted_data = 
if has_snapshot { - Some(snapshot_util::get_snapshotted_data(scope, context)) - } else { - None - }; - - (v8::Global::new(scope, context), snapshotted_data) - }; - - (isolate, context, snapshotted_data) - } - #[cfg(test)] #[inline] pub(crate) fn module_map(&self) -> &Rc> { @@ -728,7 +752,7 @@ impl JsRuntimeImpl { /// Creates a new realm (V8 context) in this JS execution context, /// pre-initialized with all of the extensions that were passed in - /// [`RuntimeOptions::extensions`] when the [`JsRuntimeImpl`] was + /// [`RuntimeOptions::extensions`] when the [`JsRuntime`] was /// constructed. pub fn create_realm(&mut self) -> Result { let realm = { @@ -768,7 +792,7 @@ impl JsRuntimeImpl { scope, context, &context_state.borrow().op_ctxs, - self.bindings_mode, + self.init_mode, ); context.set_slot(scope, context_state.clone()); let realm = JsRealmInner::new( @@ -946,10 +970,10 @@ impl JsRuntimeImpl { /// Initializes ops of provided Extensions fn create_opstate( options: &mut RuntimeOptions, - snapshot_options: &SnapshotOptions, + init_mode: InitMode, ) -> (OpState, Vec) { // Add built-in extension - if snapshot_options.loaded() { + if init_mode == InitMode::FromSnapshot { options .extensions .insert(0, crate::ops_builtin::core::init_ops()); @@ -1470,78 +1494,15 @@ impl JsRuntimeImpl { } } -impl JsRuntime { - /// Only constructor, configuration is done through `options`. - pub fn new(mut options: RuntimeOptions) -> JsRuntime { - JsRuntimeImpl::::init_v8(options.v8_platform.take()); - - let snapshot_options = snapshot_util::SnapshotOptions::new_from( - options.startup_snapshot.take(), - false, - ); - - JsRuntime(JsRuntimeImpl::::new_runtime( - options, - snapshot_options, - None, - )) - } - - pub(crate) fn state_from( - isolate: &v8::Isolate, - ) -> Rc> { - let state_ptr = isolate.get_data(STATE_DATA_OFFSET); - let state_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. 
- unsafe { Rc::from_raw(state_ptr as *const RefCell) }; - let state = state_rc.clone(); - std::mem::forget(state_rc); - state - } - - pub(crate) fn module_map_from( - isolate: &v8::Isolate, - ) -> Rc> { - let module_map_ptr = isolate.get_data(MODULE_MAP_DATA_OFFSET); - let module_map_rc = - // SAFETY: We are sure that it's a valid pointer for whole lifetime of - // the runtime. - unsafe { Rc::from_raw(module_map_ptr as *const RefCell) }; - let module_map = module_map_rc.clone(); - std::mem::forget(module_map_rc); - module_map - } - - pub(crate) fn event_loop_pending_state_from_scope( - scope: &mut v8::HandleScope, - ) -> EventLoopPendingState { - let state = JsRuntime::state_from(scope); - let module_map = JsRuntime::module_map_from(scope); - let state = EventLoopPendingState::new( - scope, - &mut state.borrow_mut(), - &module_map.borrow(), - ); - state - } -} - impl JsRuntimeForSnapshot { pub fn new( mut options: RuntimeOptions, runtime_snapshot_options: RuntimeSnapshotOptions, ) -> JsRuntimeForSnapshot { - JsRuntimeImpl::::init_v8(options.v8_platform.take()); - - let snapshot_options = snapshot_util::SnapshotOptions::new_from( - options.startup_snapshot.take(), - true, - ); - - JsRuntimeForSnapshot(JsRuntimeImpl::::new_runtime( + JsRuntime::init_v8(options.v8_platform.take(), true); + JsRuntimeForSnapshot(JsRuntime::new_inner( options, - snapshot_options, + true, runtime_snapshot_options.snapshot_module_load_cb, )) } @@ -1590,42 +1551,6 @@ impl JsRuntimeForSnapshot { } } -impl JsRuntimeInternalTrait for JsRuntimeImpl { - fn create_raw_isolate( - refs: &'static v8::ExternalReferences, - _params: Option, - snapshot: SnapshotOptions, - ) -> v8::OwnedIsolate { - snapshot_util::create_snapshot_creator(refs, snapshot) - } -} - -impl JsRuntimeInternalTrait for JsRuntimeImpl { - fn create_raw_isolate( - refs: &'static v8::ExternalReferences, - params: Option, - snapshot: SnapshotOptions, - ) -> v8::OwnedIsolate { - let mut params = params - .unwrap_or_default() - 
.embedder_wrapper_type_info_offsets( - V8_WRAPPER_TYPE_INDEX, - V8_WRAPPER_OBJECT_INDEX, - ) - .external_references(&**refs); - - if let Some(snapshot) = snapshot.snapshot() { - params = match snapshot { - Snapshot::Static(data) => params.snapshot_blob(data), - Snapshot::JustCreated(data) => params.snapshot_blob(data), - Snapshot::Boxed(data) => params.snapshot_blob(data), - }; - } - - v8::Isolate::new(params) - } -} - fn get_stalled_top_level_await_message_for_module( scope: &mut v8::HandleScope, module_id: ModuleId, @@ -1731,7 +1656,7 @@ where F: FnMut(usize, usize) -> usize, { // SAFETY: The data is a pointer to the Rust callback function. It is stored - // in `JsRuntimeImpl::allocations` and thus is guaranteed to outlive the isolate. + // in `JsRuntime::allocations` and thus is guaranteed to outlive the isolate. let callback = unsafe { &mut *(data as *mut F) }; callback(current_heap_limit, initial_heap_limit) } @@ -1800,7 +1725,7 @@ pub(crate) fn exception_to_err_result( } // Related to module loading -impl JsRuntimeImpl { +impl JsRuntime { pub(crate) fn instantiate_module( &mut self, id: ModuleId, @@ -1916,11 +1841,11 @@ impl JsRuntimeImpl { /// Evaluates an already instantiated ES module. /// /// Returns a receiver handle that resolves when module promise resolves. - /// Implementors must manually call [`JsRuntimeImpl::run_event_loop`] to drive + /// Implementors must manually call [`JsRuntime::run_event_loop`] to drive /// module evaluation future. /// /// `Error` can usually be downcast to `JsError` and should be awaited and - /// checked after [`JsRuntimeImpl::run_event_loop`] completion. + /// checked after [`JsRuntime::run_event_loop`] completion. /// /// This function panics if module has not been instantiated. 
pub fn mod_evaluate( @@ -1953,7 +1878,7 @@ impl JsRuntimeImpl { // Because that promise is created internally by V8, when error occurs during // module evaluation the promise is rejected, and since the promise has no rejection // handler it will result in call to `bindings::promise_reject_callback` adding - // the promise to pending promise rejection table - meaning JsRuntimeImpl will return + // the promise to pending promise rejection table - meaning JsRuntime will return // error on next poll(). // // This situation is not desirable as we want to manually return error at the @@ -2362,7 +2287,7 @@ impl JsRuntimeImpl { /// The module will be marked as "main", and because of that /// "import.meta.main" will return true when checked inside that module. /// - /// User must call [`JsRuntimeImpl::mod_evaluate`] with returned `ModuleId` + /// User must call [`JsRuntime::mod_evaluate`] with returned `ModuleId` /// manually after load is finished. pub async fn load_main_module( &mut self, @@ -2417,7 +2342,7 @@ impl JsRuntimeImpl { /// This method is meant to be used when loading some utility code that /// might be later imported by the main module (ie. an entry point module). /// - /// User must call [`JsRuntimeImpl::mod_evaluate`] with returned `ModuleId` + /// User must call [`JsRuntime::mod_evaluate`] with returned `ModuleId` /// manually after load is finished. pub async fn load_side_module( &mut self, @@ -2563,7 +2488,7 @@ pub fn queue_fast_async_op( ) { let runtime_state = match ctx.runtime_state.upgrade() { Some(rc_state) => rc_state, - // at least 1 Rc is held by the JsRuntimeImpl. + // at least 1 Rc is held by the JsRuntime. None => unreachable!(), }; let get_class = { @@ -2661,7 +2586,7 @@ pub fn queue_async_op<'s>( ) -> Option> { let runtime_state = match ctx.runtime_state.upgrade() { Some(rc_state) => rc_state, - // at least 1 Rc is held by the JsRuntimeImpl. + // at least 1 Rc is held by the JsRuntime. 
None => unreachable!(), }; @@ -3559,8 +3484,8 @@ pub mod tests { } } - fn create_module( - runtime: &mut JsRuntimeImpl, + fn create_module( + runtime: &mut JsRuntime, i: usize, main: bool, ) -> ModuleInfo { @@ -3603,10 +3528,7 @@ pub mod tests { } } - fn assert_module_map( - runtime: &mut JsRuntimeImpl, - modules: &Vec, - ) { + fn assert_module_map(runtime: &mut JsRuntime, modules: &Vec) { let module_map = runtime.module_map.borrow(); assert_eq!(module_map.handles.len(), modules.len()); assert_eq!(module_map.info.len(), modules.len()); @@ -4679,13 +4601,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { Ok(String::from("Test")) } - deno_core::extension!( - test_ext, - ops = [op_test], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, - ); + deno_core::extension!(test_ext, ops = [op_test]); let mut runtime = JsRuntime::new(RuntimeOptions { startup_snapshot: Some(Snapshot::Boxed(snapshot)), extensions: vec![test_ext::init_ops()], diff --git a/core/snapshot_util.rs b/core/snapshot_util.rs index 05a196f50c..88c2731477 100644 --- a/core/snapshot_util.rs +++ b/core/snapshot_util.rs @@ -153,40 +153,6 @@ fn data_error_to_panic(err: v8::DataError) -> ! 
{ } } -pub(crate) enum SnapshotOptions { - Load(Snapshot), - CreateFromExisting(Snapshot), - Create, - None, -} - -impl SnapshotOptions { - pub fn new_from(snapshot: Option, will_snapshot: bool) -> Self { - match (snapshot, will_snapshot) { - (Some(snapshot), true) => Self::CreateFromExisting(snapshot), - (None, true) => Self::Create, - (Some(snapshot), false) => Self::Load(snapshot), - (None, false) => Self::None, - } - } - - pub fn loaded(&self) -> bool { - matches!(self, Self::Load(_) | Self::CreateFromExisting(_)) - } - - pub fn will_snapshot(&self) -> bool { - matches!(self, Self::Create | Self::CreateFromExisting(_)) - } - - pub fn snapshot(self) -> Option { - match self { - Self::CreateFromExisting(snapshot) => Some(snapshot), - Self::Load(snapshot) => Some(snapshot), - _ => None, - } - } -} - pub(crate) struct SnapshottedData { pub module_map_data: v8::Global, pub module_handles: Vec>, @@ -257,9 +223,9 @@ pub(crate) fn set_snapshotted_data( /// Returns an isolate set up for snapshotting. 
pub(crate) fn create_snapshot_creator( external_refs: &'static v8::ExternalReferences, - maybe_startup_snapshot: SnapshotOptions, + maybe_startup_snapshot: Option, ) -> v8::OwnedIsolate { - if let Some(snapshot) = maybe_startup_snapshot.snapshot() { + if let Some(snapshot) = maybe_startup_snapshot { match snapshot { Snapshot::Static(data) => { v8::Isolate::snapshot_creator_from_existing_snapshot( diff --git a/ops/lib.rs b/ops/lib.rs index d4fa0bb824..d7c8b06402 100644 --- a/ops/lib.rs +++ b/ops/lib.rs @@ -143,7 +143,6 @@ impl Op { is_async: #is_async, is_unstable: #is_unstable, is_v8: #is_v8, - force_registration: false, // TODO(mmastrac) arg_count: 0, } @@ -206,7 +205,6 @@ impl Op { is_async: #is_async, is_unstable: #is_unstable, is_v8: #is_v8, - force_registration: false, arg_count: #arg_count as u8, } } diff --git a/ops/optimizer_tests/async_nop.out b/ops/optimizer_tests/async_nop.out index 3765e611a8..d59967a451 100644 --- a/ops/optimizer_tests/async_nop.out +++ b/ops/optimizer_tests/async_nop.out @@ -40,7 +40,6 @@ impl op_void_async { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/async_result.out b/ops/optimizer_tests/async_result.out index ca6d13c2e8..6f61a9697c 100644 --- a/ops/optimizer_tests/async_result.out +++ b/ops/optimizer_tests/async_result.out @@ -40,7 +40,6 @@ impl op_async_result { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/callback_options.out b/ops/optimizer_tests/callback_options.out index 656124a807..d46d46765d 100644 --- a/ops/optimizer_tests/callback_options.out +++ b/ops/optimizer_tests/callback_options.out @@ -40,7 +40,6 @@ impl op_fallback { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/cow_str.out b/ops/optimizer_tests/cow_str.out index 
ebb2108a21..d52df86ee6 100644 --- a/ops/optimizer_tests/cow_str.out +++ b/ops/optimizer_tests/cow_str.out @@ -40,7 +40,6 @@ impl op_cow_str { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/f64_slice.out b/ops/optimizer_tests/f64_slice.out index 811aee288f..403ec8fa39 100644 --- a/ops/optimizer_tests/f64_slice.out +++ b/ops/optimizer_tests/f64_slice.out @@ -40,7 +40,6 @@ impl op_f64_buf { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/incompatible_1.out b/ops/optimizer_tests/incompatible_1.out index 59eb600bc6..0e8c98fd0a 100644 --- a/ops/optimizer_tests/incompatible_1.out +++ b/ops/optimizer_tests/incompatible_1.out @@ -30,7 +30,6 @@ impl op_sync_serialize_object_with_numbers_as_keys { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/issue16934.out b/ops/optimizer_tests/issue16934.out index 35bd383390..355add5e08 100644 --- a/ops/optimizer_tests/issue16934.out +++ b/ops/optimizer_tests/issue16934.out @@ -30,7 +30,6 @@ impl send_stdin { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/issue16934_fast.out b/ops/optimizer_tests/issue16934_fast.out index 1291f9cabf..b6e80b5749 100644 --- a/ops/optimizer_tests/issue16934_fast.out +++ b/ops/optimizer_tests/issue16934_fast.out @@ -30,7 +30,6 @@ impl send_stdin { is_async: true, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/op_blob_revoke_object_url.out b/ops/optimizer_tests/op_blob_revoke_object_url.out index 1a10a2b0a8..3165430df1 100644 --- a/ops/optimizer_tests/op_blob_revoke_object_url.out +++ b/ops/optimizer_tests/op_blob_revoke_object_url.out @@ -30,7 +30,6 @@ impl 
op_blob_revoke_object_url { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/op_ffi_ptr_value.out b/ops/optimizer_tests/op_ffi_ptr_value.out index f3da0dfce5..3e4b2571da 100644 --- a/ops/optimizer_tests/op_ffi_ptr_value.out +++ b/ops/optimizer_tests/op_ffi_ptr_value.out @@ -40,7 +40,6 @@ impl op_ffi_ptr_value { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/op_print.out b/ops/optimizer_tests/op_print.out index e0fecd6b29..d79cdfd620 100644 --- a/ops/optimizer_tests/op_print.out +++ b/ops/optimizer_tests/op_print.out @@ -30,7 +30,6 @@ impl op_print { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/op_state.out b/ops/optimizer_tests/op_state.out index 300dd6fc2f..1d83ae4315 100644 --- a/ops/optimizer_tests/op_state.out +++ b/ops/optimizer_tests/op_state.out @@ -40,7 +40,6 @@ impl op_set_exit_code { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/op_state_basic1.out b/ops/optimizer_tests/op_state_basic1.out index 2452e886c0..c1ea447c53 100644 --- a/ops/optimizer_tests/op_state_basic1.out +++ b/ops/optimizer_tests/op_state_basic1.out @@ -40,7 +40,6 @@ impl foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/op_state_generics.out b/ops/optimizer_tests/op_state_generics.out index 3faaa4bf16..24596256ac 100644 --- a/ops/optimizer_tests/op_state_generics.out +++ b/ops/optimizer_tests/op_state_generics.out @@ -46,7 +46,6 @@ impl op_foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 0usize as u8, } } diff --git a/ops/optimizer_tests/op_state_result.out 
b/ops/optimizer_tests/op_state_result.out index 137eeeac04..4c58de8d6e 100644 --- a/ops/optimizer_tests/op_state_result.out +++ b/ops/optimizer_tests/op_state_result.out @@ -40,7 +40,6 @@ impl foo { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/op_state_warning.out b/ops/optimizer_tests/op_state_warning.out index ce677f0fa8..97d76aa975 100644 --- a/ops/optimizer_tests/op_state_warning.out +++ b/ops/optimizer_tests/op_state_warning.out @@ -40,7 +40,6 @@ impl op_listen { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 0usize as u8, } } diff --git a/ops/optimizer_tests/op_state_with_transforms.out b/ops/optimizer_tests/op_state_with_transforms.out index 4347f63e45..3bbb7289fc 100644 --- a/ops/optimizer_tests/op_state_with_transforms.out +++ b/ops/optimizer_tests/op_state_with_transforms.out @@ -46,7 +46,6 @@ impl op_now { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/opstate_with_arity.out b/ops/optimizer_tests/opstate_with_arity.out index a1ae081270..88651ce765 100644 --- a/ops/optimizer_tests/opstate_with_arity.out +++ b/ops/optimizer_tests/opstate_with_arity.out @@ -40,7 +40,6 @@ impl op_add_4 { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 4usize as u8, } } diff --git a/ops/optimizer_tests/option_arg.out b/ops/optimizer_tests/option_arg.out index adfc8da19d..f00937a745 100644 --- a/ops/optimizer_tests/option_arg.out +++ b/ops/optimizer_tests/option_arg.out @@ -30,7 +30,6 @@ impl op_try_close { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/owned_string.out b/ops/optimizer_tests/owned_string.out index d8c0842ac5..d186e5108b 100644 --- a/ops/optimizer_tests/owned_string.out +++ 
b/ops/optimizer_tests/owned_string.out @@ -40,7 +40,6 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/param_mut_binding_warning.out b/ops/optimizer_tests/param_mut_binding_warning.out index e99606b377..5435b21db6 100644 --- a/ops/optimizer_tests/param_mut_binding_warning.out +++ b/ops/optimizer_tests/param_mut_binding_warning.out @@ -30,7 +30,6 @@ impl op_read_sync { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/raw_ptr.out b/ops/optimizer_tests/raw_ptr.out index 3eefb5e7f4..a1bacbfc83 100644 --- a/ops/optimizer_tests/raw_ptr.out +++ b/ops/optimizer_tests/raw_ptr.out @@ -51,7 +51,6 @@ impl op_ffi_ptr_of { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/serde_v8_value.out b/ops/optimizer_tests/serde_v8_value.out index 867d89e43c..1a3d1ed31c 100644 --- a/ops/optimizer_tests/serde_v8_value.out +++ b/ops/optimizer_tests/serde_v8_value.out @@ -40,7 +40,6 @@ impl op_is_proxy { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/strings.out b/ops/optimizer_tests/strings.out index 523736d70e..a1e684caf3 100644 --- a/ops/optimizer_tests/strings.out +++ b/ops/optimizer_tests/strings.out @@ -40,7 +40,6 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/ops/optimizer_tests/strings_result.out b/ops/optimizer_tests/strings_result.out index aae8b356bc..46e27e7629 100644 --- a/ops/optimizer_tests/strings_result.out +++ b/ops/optimizer_tests/strings_result.out @@ -30,7 +30,6 @@ impl op_string_length { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 
1usize as u8, } } diff --git a/ops/optimizer_tests/u64_result.out b/ops/optimizer_tests/u64_result.out index a0d7465125..46ccd53e1a 100644 --- a/ops/optimizer_tests/u64_result.out +++ b/ops/optimizer_tests/u64_result.out @@ -30,7 +30,6 @@ impl op_bench_now { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 0usize as u8, } } diff --git a/ops/optimizer_tests/uint8array.out b/ops/optimizer_tests/uint8array.out index 124f2ac576..31915d2fed 100644 --- a/ops/optimizer_tests/uint8array.out +++ b/ops/optimizer_tests/uint8array.out @@ -40,7 +40,6 @@ impl op_import_spki_x25519 { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/unit_result.out b/ops/optimizer_tests/unit_result.out index 9a46ee0874..cab67c0ea9 100644 --- a/ops/optimizer_tests/unit_result.out +++ b/ops/optimizer_tests/unit_result.out @@ -40,7 +40,6 @@ impl op_unit_result { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 0usize as u8, } } diff --git a/ops/optimizer_tests/unit_result2.out b/ops/optimizer_tests/unit_result2.out index c2e6708a03..3d84797911 100644 --- a/ops/optimizer_tests/unit_result2.out +++ b/ops/optimizer_tests/unit_result2.out @@ -40,7 +40,6 @@ impl op_set_nodelay { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 2usize as u8, } } diff --git a/ops/optimizer_tests/unit_ret.out b/ops/optimizer_tests/unit_ret.out index 538674068e..523ae6504d 100644 --- a/ops/optimizer_tests/unit_ret.out +++ b/ops/optimizer_tests/unit_ret.out @@ -40,7 +40,6 @@ impl op_unit { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 0usize as u8, } } diff --git a/ops/optimizer_tests/wasm_op.out b/ops/optimizer_tests/wasm_op.out index cc8e3b8472..5a8996cd03 100644 --- a/ops/optimizer_tests/wasm_op.out +++ b/ops/optimizer_tests/wasm_op.out @@ -40,7 +40,6 @@ impl 
op_wasm { is_async: false, is_unstable: false, is_v8: false, - force_registration: false, arg_count: 1usize as u8, } } diff --git a/runtime/examples/extension_with_ops/main.rs b/runtime/examples/extension_with_ops/main.rs index 47feaeaeb5..1feb4ba279 100644 --- a/runtime/examples/extension_with_ops/main.rs +++ b/runtime/examples/extension_with_ops/main.rs @@ -11,13 +11,7 @@ use deno_runtime::permissions::PermissionsContainer; use deno_runtime::worker::MainWorker; use deno_runtime::worker::WorkerOptions; -deno_core::extension!( - hello_runtime, - ops = [op_hello], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, -); +deno_core::extension!(hello_runtime, ops = [op_hello]); #[op] fn op_hello(text: &str) { diff --git a/runtime/ops/fs_events.rs b/runtime/ops/fs_events.rs index 27e76b3d34..2668431ebf 100644 --- a/runtime/ops/fs_events.rs +++ b/runtime/ops/fs_events.rs @@ -31,9 +31,6 @@ use tokio::sync::mpsc; deno_core::extension!( deno_fs_events, ops = [op_fs_events_open, op_fs_events_poll], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); struct FsEventsResource { diff --git a/runtime/ops/http.rs b/runtime/ops/http.rs index 767fc3ae01..eb27112570 100644 --- a/runtime/ops/http.rs +++ b/runtime/ops/http.rs @@ -30,9 +30,6 @@ use tokio::net::UnixStream; deno_core::extension!( deno_http_runtime, ops = [op_http_start, op_http_upgrade], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index b997a89d9d..043dec7000 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -48,9 +48,6 @@ deno_core::extension!( state = |state, options| { state.put::(options.exit_code); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - } ); deno_core::extension!( @@ -63,9 +60,6 @@ deno_core::extension!( }, _ => op, }, - customizer = |ext: &mut 
deno_core::ExtensionBuilder| { - ext.force_op_registration(); - } ); #[op] diff --git a/runtime/ops/permissions.rs b/runtime/ops/permissions.rs index 6f7b98a304..663b1d2409 100644 --- a/runtime/ops/permissions.rs +++ b/runtime/ops/permissions.rs @@ -18,9 +18,6 @@ deno_core::extension!( op_revoke_permission, op_request_permission, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[derive(Deserialize)] diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index a2eace8b6a..44429fdab7 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -112,9 +112,6 @@ deno_core::extension!( deprecated::op_run_status, deprecated::op_kill, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); /// Second member stores the pid separately from the RefCell. It's needed for diff --git a/runtime/ops/runtime.rs b/runtime/ops/runtime.rs index 9f2e48d7aa..3f60c74379 100644 --- a/runtime/ops/runtime.rs +++ b/runtime/ops/runtime.rs @@ -13,9 +13,6 @@ deno_core::extension!( state = |state, options| { state.put::(options.main_module); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/runtime/ops/signal.rs b/runtime/ops/signal.rs index ba9a2a1785..934192c777 100644 --- a/runtime/ops/signal.rs +++ b/runtime/ops/signal.rs @@ -32,9 +32,6 @@ use tokio::signal::windows::CtrlC; deno_core::extension!( deno_signal, ops = [op_signal_bind, op_signal_unbind, op_signal_poll], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[cfg(unix)] diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index 7f24daec4b..b4e4d73400 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -79,9 +79,6 @@ deno_core::extension!( #[cfg(unix)] state.put(TtyModeStore::default()); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); // ref: diff --git 
a/runtime/ops/web_worker.rs b/runtime/ops/web_worker.rs index 7952a03f26..e62642fdd6 100644 --- a/runtime/ops/web_worker.rs +++ b/runtime/ops/web_worker.rs @@ -25,9 +25,6 @@ deno_core::extension!( op_worker_get_type, op_worker_sync_fetch, ], - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[op] diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index d5285ec890..f96ae38e8a 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -119,9 +119,6 @@ deno_core::extension!( FormatJsErrorFnHolder(options.format_js_error_fn); state.put::(format_js_error_fn_holder); }, - customizer = |ext: &mut deno_core::ExtensionBuilder| { - ext.force_op_registration(); - }, ); #[derive(Deserialize)] From adf41edda12a26a84cb8b4252404aae2a9e7ae03 Mon Sep 17 00:00:00 2001 From: Koen <41021050+vrugtehagel@users.noreply.github.com> Date: Mon, 5 Jun 2023 02:03:44 +0200 Subject: [PATCH 295/320] fix(ext/web): Copy EventTarget list before dispatch (#19360) Related issue: https://github.com/denoland/deno/issues/19358. This is a regression that seems to have been introduced in https://github.com/denoland/deno/pull/18905. It looks to have been a performance optimization. The issue is probably easiest described with some code: ```ts const target = new EventTarget(); const event = new Event("foo"); target.addEventListener("foo", () => { console.log('base'); target.addEventListener("foo", () => { console.log('nested'); }); }); target.dispatchEvent(event); ``` Essentially, the second event listener is being attached while the `foo` event is still being dispatched. It should then not fire that second event listener, but Deno currently does. 
--- cli/tests/unit/event_target_test.ts | 14 ++++++++++++++ ext/web/02_event.js | 5 +++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/event_target_test.ts b/cli/tests/unit/event_target_test.ts index 49bd354aa2..c7acab364c 100644 --- a/cli/tests/unit/event_target_test.ts +++ b/cli/tests/unit/event_target_test.ts @@ -245,6 +245,20 @@ Deno.test(function eventTargetDispatchShouldSetTargetInListener() { assertEquals(called, true); }); +Deno.test(function eventTargetDispatchShouldFireCurrentListenersOnly() { + const target = new EventTarget(); + const event = new Event("foo"); + let callCount = 0; + target.addEventListener("foo", () => { + ++callCount; + target.addEventListener("foo", () => { + ++callCount; + }); + }); + target.dispatchEvent(event); + assertEquals(callCount, 1); +}); + Deno.test(function eventTargetAddEventListenerGlobalAbort() { return new Promise((resolve) => { const c = new AbortController(); diff --git a/ext/web/02_event.js b/ext/web/02_event.js index e7553ea89b..142fa66b2b 100644 --- a/ext/web/02_event.js +++ b/ext/web/02_event.js @@ -737,13 +737,14 @@ function innerInvokeEventListeners( } let handlers = targetListeners[type]; + const handlersLength = handlers.length; // Copy event listeners before iterating since the list can be modified during the iteration. - if (handlers.length > 1) { + if (handlersLength > 1) { handlers = ArrayPrototypeSlice(targetListeners[type]); } - for (let i = 0; i < handlers.length; i++) { + for (let i = 0; i < handlersLength; i++) { const listener = handlers[i]; let capture, once, passive; From 21c2c01ebed902c70763bb9319c3ec48c4cb5284 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 5 Jun 2023 10:52:40 +0200 Subject: [PATCH 296/320] perf: optimize RegExp usage in JS (#19364) Towards https://github.com/denoland/deno/issues/19330 Shows about 1% improvement in the HTTP benchmark. 
--- ext/fetch/20_headers.js | 16 ++++++++-------- ext/fetch/23_request.js | 4 ++-- ext/fetch/23_response.js | 4 ++-- ext/web/01_mimesniff.js | 3 ++- ext/webidl/00_webidl.js | 3 ++- ext/websocket/01_websocket.js | 5 +++-- 6 files changed, 19 insertions(+), 16 deletions(-) diff --git a/ext/fetch/20_headers.js b/ext/fetch/20_headers.js index 89b9e1a2bc..6d934a7c1c 100644 --- a/ext/fetch/20_headers.js +++ b/ext/fetch/20_headers.js @@ -30,7 +30,7 @@ const { ArrayPrototypeFilter, ObjectEntries, ObjectHasOwn, - RegExpPrototypeTest, + RegExpPrototypeExec, SafeArrayIterator, SafeRegExp, Symbol, @@ -102,10 +102,10 @@ function appendHeader(headers, name, value) { value = normalizeHeaderValue(value); // 2. - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } - if (RegExpPrototypeTest(ILLEGAL_VALUE_CHARS, value)) { + if (RegExpPrototypeExec(ILLEGAL_VALUE_CHARS, value) !== null) { throw new TypeError("Header value is not valid."); } @@ -282,7 +282,7 @@ class Headers { webidl.requiredArguments(arguments.length, 1, prefix); name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } if (this[_guard] == "immutable") { @@ -307,7 +307,7 @@ class Headers { webidl.requiredArguments(arguments.length, 1, prefix); name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } @@ -323,7 +323,7 @@ class Headers { webidl.requiredArguments(arguments.length, 1, prefix); name = webidl.converters["ByteString"](name, prefix, "Argument 1"); - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, 
name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } @@ -351,10 +351,10 @@ class Headers { value = normalizeHeaderValue(value); // 2. - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, name)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, name) === null) { throw new TypeError("Header name is not valid."); } - if (RegExpPrototypeTest(ILLEGAL_VALUE_CHARS, value)) { + if (RegExpPrototypeExec(ILLEGAL_VALUE_CHARS, value) !== null) { throw new TypeError("Header value is not valid."); } diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 4c46ebe750..daf77a834e 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -36,7 +36,7 @@ const { ArrayPrototypeSplice, ObjectKeys, ObjectPrototypeIsPrototypeOf, - RegExpPrototypeTest, + RegExpPrototypeExec, StringPrototypeStartsWith, Symbol, SymbolFor, @@ -227,7 +227,7 @@ function validateAndNormalizeMethod(m) { } // Regular path - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, m)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, m) === null) { throw new TypeError("Method is not valid."); } const upperCase = byteUpperCase(m); diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index 52ebc91fe6..dc4e754342 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -37,7 +37,7 @@ const { ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, RangeError, - RegExpPrototypeTest, + RegExpPrototypeExec, SafeArrayIterator, SafeRegExp, Symbol, @@ -179,7 +179,7 @@ function initializeAResponse(response, init, bodyWithType) { // 2. 
if ( init.statusText && - !RegExpPrototypeTest(REASON_PHRASE_RE, init.statusText) + RegExpPrototypeExec(REASON_PHRASE_RE, init.statusText) === null ) { throw new TypeError("Status text is not valid."); } diff --git a/ext/web/01_mimesniff.js b/ext/web/01_mimesniff.js index ad89f33cd7..7d402e0801 100644 --- a/ext/web/01_mimesniff.js +++ b/ext/web/01_mimesniff.js @@ -13,6 +13,7 @@ const { MapPrototypeHas, MapPrototypeSet, RegExpPrototypeTest, + RegExpPrototypeExec, SafeMap, SafeMapIterator, StringPrototypeReplaceAll, @@ -197,7 +198,7 @@ function serializeMimeType(mimeType) { for (const param of new SafeMapIterator(mimeType.parameters)) { serialization += `;${param[0]}=`; let value = param[1]; - if (!RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, value)) { + if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, value) === null) { value = StringPrototypeReplaceAll(value, "\\", "\\\\"); value = StringPrototypeReplaceAll(value, '"', '\\"'); value = `"${value}"`; diff --git a/ext/webidl/00_webidl.js b/ext/webidl/00_webidl.js index dfaa774e29..ca1c7c6064 100644 --- a/ext/webidl/00_webidl.js +++ b/ext/webidl/00_webidl.js @@ -59,6 +59,7 @@ const { ReflectHas, ReflectOwnKeys, RegExpPrototypeTest, + RegExpPrototypeExec, SafeRegExp, SafeSet, SetPrototypeEntries, @@ -406,7 +407,7 @@ converters.DOMString = function (V, prefix, context, opts = {}) { const IS_BYTE_STRING = new SafeRegExp(/^[\x00-\xFF]*$/); converters.ByteString = (V, prefix, context, opts) => { const x = converters.DOMString(V, prefix, context, opts); - if (!RegExpPrototypeTest(IS_BYTE_STRING, x)) { + if (RegExpPrototypeExec(IS_BYTE_STRING, x) === null) { throw makeException( TypeError, "is not a valid ByteString", diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index f7dd516ff0..c4c686b9c7 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -33,7 +33,7 @@ const { ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, PromisePrototypeThen, - RegExpPrototypeTest, + 
RegExpPrototypeExec, SafeSet, SetPrototypeGetSize, // TODO(lucacasonato): add SharedArrayBuffer to primordials @@ -256,7 +256,8 @@ class WebSocket extends EventTarget { if ( ArrayPrototypeSome( protocols, - (protocol) => !RegExpPrototypeTest(HTTP_TOKEN_CODE_POINT_RE, protocol), + (protocol) => + RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, protocol) === null, ) ) { throw new DOMException( From 77a950aac417ba5e9bf1a48b0ec8934291376a8c Mon Sep 17 00:00:00 2001 From: Mathias Lafeldt Date: Mon, 5 Jun 2023 11:22:32 +0200 Subject: [PATCH 297/320] feat(runtime): support creating workers using custom v8 params (#19339) In order to limit the memory usage of isolates via heap_limits. --- cli/worker.rs | 1 + runtime/worker.rs | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/cli/worker.rs b/cli/worker.rs index 6edf427ea5..b8bb6e9416 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -449,6 +449,7 @@ impl CliMainWorkerFactory { }, extensions, startup_snapshot: Some(crate::js::deno_isolate_init()), + create_params: None, unsafely_ignore_certificate_errors: shared .options .unsafely_ignore_certificate_errors diff --git a/runtime/worker.rs b/runtime/worker.rs index 0e62decb46..10375818d0 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -95,6 +95,10 @@ pub struct WorkerOptions { /// V8 snapshot that should be loaded on startup. pub startup_snapshot: Option, + + /// Optional isolate creation parameters, such as heap limits. 
+ pub create_params: Option, + pub unsafely_ignore_certificate_errors: Option>, pub root_cert_store_provider: Option>, pub seed: Option, @@ -181,6 +185,7 @@ impl Default for WorkerOptions { blob_store: Default::default(), extensions: Default::default(), startup_snapshot: Default::default(), + create_params: Default::default(), bootstrap: Default::default(), stdio: Default::default(), } @@ -321,6 +326,7 @@ impl MainWorker { let mut js_runtime = JsRuntime::new(RuntimeOptions { module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(startup_snapshot), + create_params: options.create_params, source_map_getter: options.source_map_getter, get_error_class_fn: options.get_error_class_fn, shared_array_buffer_store: options.shared_array_buffer_store.clone(), From 08bd23970dbce4ccf8103abf27e4cfa1b747705b Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 5 Jun 2023 12:25:47 +0200 Subject: [PATCH 298/320] feat: add more options to Deno.inspect (#19337) For https://github.com/denoland/deno_std/issues/3404 --------- Co-authored-by: Yoshiya Hinosawa --- cli/tests/integration/lsp_tests.rs | 2 +- cli/tests/unit/console_test.ts | 24 ++++++++++++++++++++ cli/tsc/dts/lib.deno.ns.d.ts | 8 +++++++ ext/console/01_console.js | 5 +++- ext/node/polyfills/internal/util/inspect.mjs | 1 + 5 files changed, 38 insertions(+), 2 deletions(-) diff --git a/cli/tests/integration/lsp_tests.rs b/cli/tests/integration/lsp_tests.rs index eee83c4a2d..fa8cb6a3c3 100644 --- a/cli/tests/integration/lsp_tests.rs +++ b/cli/tests/integration/lsp_tests.rs @@ -4715,7 +4715,7 @@ fn lsp_completions_auto_import() { "source": "./b.ts", "data": { "exportName": "foo", - "exportMapKey": "foo|6810|file:///a/b", + "exportMapKey": "foo|6812|file:///a/b", "moduleSpecifier": "./b.ts", "fileName": "file:///a/b.ts" }, diff --git a/cli/tests/unit/console_test.ts b/cli/tests/unit/console_test.ts index c4f2f64a4b..b177b956bb 100644 --- a/cli/tests/unit/console_test.ts +++ b/cli/tests/unit/console_test.ts @@ 
-2278,3 +2278,27 @@ Deno.test(function inspectAnonymousFunctions() { "[AsyncGeneratorFunction (anonymous)]", ); }); + +Deno.test(function inspectBreakLengthOption() { + assertEquals( + Deno.inspect("123456789\n".repeat(3), { breakLength: 34 }), + `"123456789\\n123456789\\n123456789\\n"`, + ); + assertEquals( + Deno.inspect("123456789\n".repeat(3), { breakLength: 33 }), + `"123456789\\n" + + "123456789\\n" + + "123456789\\n"`, + ); +}); + +Deno.test(function inspectEscapeSequencesFalse() { + assertEquals( + Deno.inspect("foo\nbar", { escapeSequences: true }), + '"foo\\nbar"', + ); // default behavior + assertEquals( + Deno.inspect("foo\nbar", { escapeSequences: false }), + '"foo\nbar"', + ); +}); diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index a7d6adab83..0247eda9c5 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -4240,6 +4240,14 @@ declare namespace Deno { * * @default {4} */ depth?: number; + /** The maximum length for an inspection to take up a single line. + * + * @default {80} */ + breakLength?: number; + /** Whether or not to escape sequences. + * + * @default {true} */ + escapeSequences?: boolean; /** The maximum number of iterable entries to print. 
* * @default {100} */ diff --git a/ext/console/01_console.js b/ext/console/01_console.js index dbbc549cad..11b6c549ce 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -2427,6 +2427,7 @@ const denoInspectDefaultOptions = { colors: false, showProxy: false, breakLength: 80, + escapeSequences: true, compact: 3, sorted: false, getters: false, @@ -2500,7 +2501,9 @@ function quoteString(string, ctx) { ctx.quotes[0]; const escapePattern = new SafeRegExp(`(?=[${quote}\\\\])`, "g"); string = StringPrototypeReplace(string, escapePattern, "\\"); - string = replaceEscapeSequences(string); + if (ctx.escapeSequences) { + string = replaceEscapeSequences(string); + } return `${quote}${string}${quote}`; } diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs index 671ab2acf4..2d34db9c71 100644 --- a/ext/node/polyfills/internal/util/inspect.mjs +++ b/ext/node/polyfills/internal/util/inspect.mjs @@ -134,6 +134,7 @@ const inspectDefaultOptions = { colors: false, showProxy: false, breakLength: 80, + escapeSequences: true, compact: 3, sorted: false, getters: false, From 3d582156b6c17ef5adef1500cdfc80783f6afe68 Mon Sep 17 00:00:00 2001 From: VlkrS <47375452+VlkrS@users.noreply.github.com> Date: Mon, 5 Jun 2023 14:24:19 +0200 Subject: [PATCH 299/320] feat(runtime): Add an OpenBSD implementation for rss() (#19221) In case you would consider including specific implementations and not only build fixes, here's the implementation of the ```rss()``` function for OpenBSD. 
--- runtime/ops/os/mod.rs | 47 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index 043dec7000..bbadee993a 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -380,6 +380,53 @@ fn rss() -> usize { task_info.resident_size as usize } +#[cfg(target_os = "openbsd")] +fn rss() -> usize { + // Uses OpenBSD's KERN_PROC_PID sysctl(2) + // to retrieve information about the current + // process, part of which is the RSS (p_vm_rssize) + + // SAFETY: libc call (get PID of own process) + let pid = unsafe { libc::getpid() }; + // SAFETY: libc call (get system page size) + let pagesize = unsafe { libc::sysconf(libc::_SC_PAGESIZE) } as usize; + // KERN_PROC_PID returns a struct libc::kinfo_proc + let mut kinfoproc = std::mem::MaybeUninit::::uninit(); + let mut size = std::mem::size_of_val(&kinfoproc) as libc::size_t; + let mut mib = [ + libc::CTL_KERN, + libc::KERN_PROC, + libc::KERN_PROC_PID, + pid, + // mib is an array of integers, size is of type size_t + // conversion is safe, because the size of a libc::kinfo_proc + // structure will not exceed i32::MAX + size.try_into().unwrap(), + 1, + ]; + // SAFETY: libc call, mib has been statically initialized, + // kinfoproc is a valid pointer to a libc::kinfo_proc struct + let res = unsafe { + libc::sysctl( + mib.as_mut_ptr(), + mib.len() as _, + kinfoproc.as_mut_ptr() as *mut libc::c_void, + &mut size, + std::ptr::null_mut(), + 0, + ) + }; + + if res == 0 { + // SAFETY: sysctl returns 0 on success and kinfoproc is initialized + // p_vm_rssize contains size in pages -> multiply with pagesize to + // get size in bytes. 
+ pagesize * unsafe { (*kinfoproc.as_mut_ptr()).p_vm_rssize as usize } + } else { + 0 + } +} + #[cfg(windows)] fn rss() -> usize { use winapi::shared::minwindef::DWORD; From d2047f1337ccb5e27598308bf5fefa913eeaa34f Mon Sep 17 00:00:00 2001 From: nasa Date: Mon, 5 Jun 2023 21:43:04 +0900 Subject: [PATCH 300/320] feat(node_compat): Add a close method to the FileHandle class. (#19357) ## WHY ref: https://github.com/denoland/deno/issues/19165 The FileHandle class has many missing methods compared to node. Add these. ## WHAT - Add close method --------- Co-authored-by: Matt Mastracci --- cli/tests/unit_node/_fs/_fs_handle_test.ts | 2 +- ext/node/polyfills/internal/fs/handle.ts | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/cli/tests/unit_node/_fs/_fs_handle_test.ts b/cli/tests/unit_node/_fs/_fs_handle_test.ts index c1e5ef8713..165608e1ce 100644 --- a/cli/tests/unit_node/_fs/_fs_handle_test.ts +++ b/cli/tests/unit_node/_fs/_fs_handle_test.ts @@ -16,5 +16,5 @@ Deno.test("readFileSuccess", async function () { assert(data instanceof Uint8Array); assertEquals(new TextDecoder().decode(data as Uint8Array), "hello world"); - Deno.close(fileHandle.fd); + await fileHandle.close(); }); diff --git a/ext/node/polyfills/internal/fs/handle.ts b/ext/node/polyfills/internal/fs/handle.ts index a369a4a4d7..a1ee263ead 100644 --- a/ext/node/polyfills/internal/fs/handle.ts +++ b/ext/node/polyfills/internal/fs/handle.ts @@ -24,6 +24,11 @@ export class FileHandle extends EventEmitter { ): Promise { return promises.readFile(this, opt); } + + close(): Promise { + // Note that Deno.close is not async + return Promise.resolve(Deno.close(this.fd)); + } } export default { From 11dd5a0ae73b4d3612de6422893a25232f930b84 Mon Sep 17 00:00:00 2001 From: Levente Kurusa Date: Mon, 5 Jun 2023 14:52:02 +0200 Subject: [PATCH 301/320] fix(ext/crypto): fix JWK import of Ed25519 (#19279) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: #18049 
--------- Co-authored-by: Bartek Iwańczuk --- ext/crypto/00_crypto.js | 14 ++++++- ext/crypto/lib.rs | 9 +++-- tools/wpt/expectation.json | 80 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 93 insertions(+), 10 deletions(-) diff --git a/ext/crypto/00_crypto.js b/ext/crypto/00_crypto.js index 19e669acd0..d88aef219d 100644 --- a/ext/crypto/00_crypto.js +++ b/ext/crypto/00_crypto.js @@ -2319,7 +2319,12 @@ function importKeyEd25519( // 9. if (jwk.d !== undefined) { // https://www.rfc-editor.org/rfc/rfc8037#section-2 - const privateKeyData = ops.op_crypto_base64url_decode(jwk.d); + let privateKeyData; + try { + privateKeyData = ops.op_crypto_base64url_decode(jwk.d); + } catch (_) { + throw new DOMException("invalid private key data", "DataError"); + } const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, privateKeyData); @@ -2337,7 +2342,12 @@ function importKeyEd25519( ); } else { // https://www.rfc-editor.org/rfc/rfc8037#section-2 - const publicKeyData = ops.op_crypto_base64url_decode(jwk.x); + let publicKeyData; + try { + publicKeyData = ops.op_crypto_base64url_decode(jwk.x); + } catch (_) { + throw new DOMException("invalid public key data", "DataError"); + } const handle = {}; WeakMapPrototypeSet(KEY_STORE, handle, publicKeyData); diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index 05349bf680..dc5faf5e7e 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -116,10 +116,11 @@ deno_core::extension!(deno_crypto, ); #[op] -pub fn op_crypto_base64url_decode(data: String) -> ZeroCopyBuf { - let data: Vec = - base64::decode_config(data, base64::URL_SAFE_NO_PAD).unwrap(); - data.into() +pub fn op_crypto_base64url_decode( + data: String, +) -> Result { + let data: Vec = base64::decode_config(data, base64::URL_SAFE_NO_PAD)?; + Ok(data.into()) } #[op] diff --git a/tools/wpt/expectation.json b/tools/wpt/expectation.json index 57b8ba75ab..ed29c43e4c 100644 --- a/tools/wpt/expectation.json +++ b/tools/wpt/expectation.json @@ -1159,12 +1159,84 @@ "Good 
parameters: X448 bits (pkcs8, buffer(72), {name: X448}, false, [deriveBits])", "Good parameters: X448 bits (jwk, object(crv, d, x, kty), {name: X448}, false, [deriveBits])" ], - "okp_importKey_failures_Ed25519.https.any.html": false, - "okp_importKey_failures_Ed25519.https.any.worker.html": false, + "okp_importKey_failures_Ed25519.https.any.html": [ + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify, verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify, verify])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign, sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])" + ], + "okp_importKey_failures_Ed25519.https.any.worker.html": [ + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify])", + "Bad key length: importKey(raw, {name: Ed25519}, true, [verify, verify])", + "Bad key length: importKey(raw, {name: Ed25519}, false, [verify, verify])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, true, [sign, sign])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: Ed25519}, false, [sign, sign])", + "Invalid key pair: importKey(jwk(private), {name: Ed25519}, true, [sign])", + "Invalid key pair: 
importKey(jwk(private), {name: Ed25519}, true, [sign, sign])" + ], "okp_importKey_failures_Ed448.https.any.html": false, "okp_importKey_failures_Ed448.https.any.worker.html": false, - "okp_importKey_failures_X25519.https.any.html": false, - "okp_importKey_failures_X25519.https.any.worker.html": false, + "okp_importKey_failures_X25519.https.any.html": [ + "Bad key length: importKey(raw, {name: X25519}, true, [])", + "Bad key length: importKey(raw, {name: X25519}, false, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, true, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, false, [])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), 
{name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])" + ], + "okp_importKey_failures_X25519.https.any.worker.html": [ + "Bad key length: importKey(raw, {name: X25519}, true, [])", + "Bad key length: importKey(raw, {name: X25519}, false, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, true, [])", + "Bad key length: importKey(jwk (public) , {name: X25519}, false, [])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits, deriveKey])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Bad key length: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: 
X25519}, false, [deriveBits, deriveKey])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Missing JWK 'x' parameter: importKey(jwk(private), {name: X25519}, false, [deriveKey, deriveBits, deriveKey, deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits, deriveKey])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveBits])", + "Invalid key pair: importKey(jwk(private), {name: X25519}, true, [deriveKey, deriveBits, deriveKey, deriveBits])" + ], "okp_importKey_failures_X448.https.any.html": false, "okp_importKey_failures_X448.https.any.worker.html": false }, From d54ef02dfe4455c043c3ccb3d1d15e5fcc302756 Mon Sep 17 00:00:00 2001 From: Kenta Moriuchi Date: Tue, 6 Jun 2023 04:57:01 +0900 Subject: [PATCH 302/320] chore: update deno_lint to 0.46.0 (#19372) --- Cargo.lock | 4 ++-- cli/Cargo.toml | 2 +- ext/cache/01_cache.js | 2 ++ ext/console/01_console.js | 3 ++- ext/fetch/21_formdata.js | 3 ++- ext/fetch/22_body.js | 1 + ext/http/00_serve.js | 6 ++++-- ext/url/00_url.js | 1 + ext/web/06_streams.js | 1 + ext/web/09_file.js | 1 + ext/websocket/01_websocket.js | 1 + third_party | 2 +- 12 files changed, 19 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4ded396c9f..6b2c8a9065 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1101,9 +1101,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.45.0" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3867178bfb6579aaf9ed79599d3181d134f13dfcd38fdd93cae7d53a37bece8d" +checksum = "afc515e82ae97f2cc562ec482251700e96570c8fde997579629f4666e70066d7" 
dependencies = [ "anyhow", "deno_ast", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 885ef1c863..df045a754a 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -45,7 +45,7 @@ deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] deno_doc = "0.62.0" deno_emit = "0.20.0" deno_graph = "=0.48.1" -deno_lint = { version = "0.45.0", features = ["docs"] } +deno_lint = { version = "0.46.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] } diff --git a/ext/cache/01_cache.js b/ext/cache/01_cache.js index 9b5404acbb..9476420efa 100644 --- a/ext/cache/01_cache.js +++ b/ext/cache/01_cache.js @@ -128,6 +128,7 @@ class Cache { "op_cache_put", { cacheId: this[_id], + // deno-lint-ignore prefer-primordials requestUrl: reqUrl.toString(), responseHeaders: innerResponse.headerList, requestHeaders: innerRequest.headerList, @@ -243,6 +244,7 @@ class Cache { "op_cache_match", { cacheId: this[_id], + // deno-lint-ignore prefer-primordials requestUrl: url.toString(), requestHeaders: innerRequest.headerList, }, diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 11b6c549ce..6cf3c6dcac 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -37,6 +37,7 @@ const { Error, ErrorCaptureStackTrace, ErrorPrototype, + ErrorPrototypeToString, FunctionPrototypeBind, FunctionPrototypeCall, FunctionPrototypeToString, @@ -1578,7 +1579,7 @@ function inspectError(value, ctx) { if (stack?.includes("\n at")) { finalMessage += stack; } else { - finalMessage += `[${stack || value.toString()}]`; + finalMessage += `[${stack || ErrorPrototypeToString(value)}]`; } } finalMessage += ArrayPrototypeJoin( diff --git a/ext/fetch/21_formdata.js b/ext/fetch/21_formdata.js index 1ddd5f6564..1f0f00088f 100644 --- a/ext/fetch/21_formdata.js +++ b/ext/fetch/21_formdata.js @@ -31,6 +31,7 @@ const { SafeRegExp, 
Symbol, StringFromCharCode, + StringPrototypeCharCodeAt, StringPrototypeTrim, StringPrototypeSlice, StringPrototypeSplit, @@ -368,7 +369,7 @@ function parseContentDisposition(value) { function decodeLatin1StringAsUtf8(latin1String) { const buffer = new Uint8Array(latin1String.length); for (let i = 0; i < latin1String.length; i++) { - buffer[i] = latin1String.charCodeAt(i); + buffer[i] = StringPrototypeCharCodeAt(latin1String, i); } return core.decode(buffer); } diff --git a/ext/fetch/22_body.js b/ext/fetch/22_body.js index 82703af761..9fe00b1445 100644 --- a/ext/fetch/22_body.js +++ b/ext/fetch/22_body.js @@ -424,6 +424,7 @@ function extractBody(object) { ObjectPrototypeIsPrototypeOf(URLSearchParamsPrototype, object) ) { // TODO(@satyarohith): not sure what primordial here. + // deno-lint-ignore prefer-primordials source = object.toString(); contentType = "application/x-www-form-urlencoded;charset=UTF-8"; } else if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, object)) { diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index dbdc227056..ba8080e27b 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -37,6 +37,8 @@ import { import { listen, TcpConn } from "ext:deno_net/01_net.js"; import { listenTls } from "ext:deno_net/02_tls.js"; const { + ArrayPrototypeFlat, + ArrayPrototypePush, ObjectPrototypeIsPrototypeOf, PromisePrototypeCatch, SafeSet, @@ -337,7 +339,7 @@ class InnerRequest { const headers = []; const reqHeaders = op_http_get_request_headers(this.#slabId); for (let i = 0; i < reqHeaders.length; i += 2) { - headers.push([reqHeaders[i], reqHeaders[i + 1]]); + ArrayPrototypePush(headers, [reqHeaders[i], reqHeaders[i + 1]]); } return headers; } @@ -575,7 +577,7 @@ function mapToCallback(context, callback, onError) { if (headers.length == 1) { op_http_set_response_header(req, headers[0][0], headers[0][1]); } else { - op_http_set_response_headers(req, headers.flat()); + op_http_set_response_headers(req, ArrayPrototypeFlat(headers)); } } 
diff --git a/ext/url/00_url.js b/ext/url/00_url.js index b4bc34b927..49dd2c46f3 100644 --- a/ext/url/00_url.js +++ b/ext/url/00_url.js @@ -149,6 +149,7 @@ class URLSearchParams { if (url === null) { return; } + // deno-lint-ignore prefer-primordials url[_updateUrlSearch](this.toString()); } diff --git a/ext/web/06_streams.js b/ext/web/06_streams.js index c0cbb30498..21207c3729 100644 --- a/ext/web/06_streams.js +++ b/ext/web/06_streams.js @@ -1261,6 +1261,7 @@ function readableByteStreamControllerEnqueueClonedChunkToQueue( ); } else { // TODO(lucacasonato): add SharedArrayBuffer to primordials + // deno-lint-ignore prefer-primordials cloneResult = buffer.slice(byteOffset, byteOffset + byteLength); } } catch (e) { diff --git a/ext/web/09_file.js b/ext/web/09_file.js index d65a512f93..79a9c41b29 100644 --- a/ext/web/09_file.js +++ b/ext/web/09_file.js @@ -326,6 +326,7 @@ class Blob { relativeStart -= size; relativeEnd -= size; } else { + // deno-lint-ignore prefer-primordials const chunk = part.slice( relativeStart, MathMin(part.size, relativeEnd), diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index c4c686b9c7..e71cae44a8 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -344,6 +344,7 @@ class WebSocket extends EventTarget { if (ObjectPrototypeIsPrototypeOf(BlobPrototype, data)) { PromisePrototypeThen( + // deno-lint-ignore prefer-primordials data.slice().arrayBuffer(), (ab) => sendTypedArray( diff --git a/third_party b/third_party index ee59830ca2..7882a6c776 160000 --- a/third_party +++ b/third_party @@ -1 +1 @@ -Subproject commit ee59830ca23fd0aa423a3905005835c586e73e77 +Subproject commit 7882a6c7763efbda55318e019b239bec7704765f From 8829a1d3620cab23f7ac724feeae60c9de6ebad2 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 5 Jun 2023 19:03:39 -0400 Subject: [PATCH 303/320] fix: upgrade to deno_ast 0.27 (#19375) Closes #19148 --- .dprint.json | 6 +- .github/workflows/ci.generate.ts | 2 +- 
.github/workflows/ci.yml | 6 +- Cargo.lock | 198 +++++++++--------- Cargo.toml | 2 +- cli/Cargo.toml | 18 +- cli/standalone/mod.rs | 3 + .../testdata/coverage/complex_expected.lcov | 7 +- .../testdata/coverage/complex_expected.out | 4 +- third_party | 2 +- 10 files changed, 127 insertions(+), 121 deletions(-) diff --git a/.dprint.json b/.dprint.json index 0e10e067a4..1decc7863e 100644 --- a/.dprint.json +++ b/.dprint.json @@ -56,9 +56,9 @@ "ext/websocket/autobahn/reports" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.84.4.wasm", - "https://plugins.dprint.dev/json-0.17.2.wasm", - "https://plugins.dprint.dev/markdown-0.15.2.wasm", + "https://plugins.dprint.dev/typescript-0.85.0.wasm", + "https://plugins.dprint.dev/json-0.17.3.wasm", + "https://plugins.dprint.dev/markdown-0.15.3.wasm", "https://plugins.dprint.dev/toml-0.5.4.wasm", "https://plugins.dprint.dev/exec-0.3.5.json@d687dda57be0fe9a0088ccdaefa5147649ff24127d8b3ea227536c68ee7abeab" ] diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index ef37374c4d..cb32e6415d 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. 
-const cacheVersion = 32; +const cacheVersion = 33; const Runners = (() => { const ubuntuRunner = "ubuntu-22.04"; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 80d20f1725..4bb3ee964d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -293,7 +293,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '32-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '33-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -305,7 +305,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '32-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '33-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -589,7 +589,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '32-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '33-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index 6b2c8a9065..4bab387ffc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -73,6 +73,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.20" @@ -150,9 +161,9 @@ dependencies = [ [[package]] name = "ast_node" -version = "0.9.3" 
+version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52f7fd7740c5752c16281a1c1f9442b1e69ba41738acde85dc604aaf3ce41890" +checksum = "c704e2f6ee1a98223f5a7629a6ef0f3decb3b552ed282889dc957edff98ce1e6" dependencies = [ "pmutil", "proc-macro2 1.0.56", @@ -395,9 +406,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "byteorder" @@ -690,7 +701,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" dependencies = [ "cfg-if", - "hashbrown", + "hashbrown 0.12.3", "lock_api", "once_cell", "parking_lot_core 0.9.7", @@ -753,7 +764,7 @@ dependencies = [ "glob", "http", "hyper 0.14.26", - "import_map 0.15.0", + "import_map", "indexmap", "jsonc-parser", "junction", @@ -801,9 +812,9 @@ dependencies = [ [[package]] name = "deno_ast" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b4db18773938f4613617d384b6579983c46fbe9962da7390a9fc7525ccbe9c" +checksum = "db178e9f423fe41ff3580e32c43bc13726a5730360ef04e50b84de683a24f7d9" dependencies = [ "anyhow", "base64 0.13.1", @@ -932,15 +943,15 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.62.0" +version = "0.63.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e4c826679e4b0dd4f00b23e6c45343ce14903c3df2c210d094ee969312b8a" +checksum = "499936300106c3c67caae87e29def3df5ea9385db6ed7428f154972f70ed39fa" dependencies = [ "cfg-if", "deno_ast", "deno_graph", "futures", - "import_map 0.13.0", + "import_map", "lazy_static", "regex", "serde", @@ -950,9 +961,9 @@ dependencies = [ [[package]] name = "deno_emit" -version = 
"0.20.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e00ae58a811d155fc8c256ce54f35c752ee8c7dc777f0675971735d2783bd5e6" +checksum = "6bdc024d2c1e5ec56ef6f923be2c2fea4662d596b0a68074ccf89991b38a05e7" dependencies = [ "anyhow", "base64 0.13.1", @@ -960,7 +971,9 @@ dependencies = [ "deno_graph", "escape8259", "futures", + "import_map", "parking_lot 0.11.2", + "url", ] [[package]] @@ -1015,9 +1028,9 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.48.1" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdbc17bfe49a41dd596ba2a96106b3eae3bd0812e1b63a6fe5883166c1b6fef" +checksum = "a7e07fdff6c7dc1a9a7c03ce69435fda4b53641d2d6d3d3ed6d29cf67fefd3ea" dependencies = [ "anyhow", "data-url", @@ -1101,9 +1114,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.46.0" +version = "0.47.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc515e82ae97f2cc562ec482251700e96570c8fde997579629f4666e70066d7" +checksum = "044678646b9b5b01a8f6bcb19d106de8ed465e98eed1a49488ac86f807fc37b0" dependencies = [ "anyhow", "deno_ast", @@ -1546,9 +1559,9 @@ checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" [[package]] name = "dprint-core" -version = "0.60.0" +version = "0.62.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c762da282ebc7635f7918898e26d50e2b282378977dc7b3786364ac12065a71" +checksum = "d966e6047321db5f011567c1819b89972a457ab49c2f4b56f074e67a59214112" dependencies = [ "anyhow", "bumpalo", @@ -1560,9 +1573,9 @@ dependencies = [ [[package]] name = "dprint-plugin-json" -version = "0.17.2" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602d5b9cc4657bdf06d27fa6c22dfcfdb85a19ab555f2834cf7b01ba8001a1f6" +checksum = "00905c12671f1be023a8e12915b97a701a6561bacf39221ad314884c99f55c74" dependencies = [ "anyhow", "dprint-core", @@ 
-1573,9 +1586,9 @@ dependencies = [ [[package]] name = "dprint-plugin-markdown" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5095e6471bc71892fd4fe3f74205a6d2e22bd3be9b09758fd23bff67b5ec15fd" +checksum = "f20e5763efd89925412ac0f525a25bbba9066b2ba924eae87ab8a7120df0744c" dependencies = [ "anyhow", "dprint-core", @@ -1587,9 +1600,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.84.4" +version = "0.85.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9945b1fae98529bd905d66b3c5efd45408b928cd10b7a3e0764049cf9aaf2167" +checksum = "e49896ccb3ca2bfe03d96316b7acb0ce83dcfce69adc5b228f16fa6b30fb674b" dependencies = [ "anyhow", "deno_ast", @@ -1600,9 +1613,9 @@ dependencies = [ [[package]] name = "dprint-swc-ext" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3359a644cca781aece7d7c16bfa80fb35ac83da4e1014a28600debd1ef2a7e" +checksum = "dd4dda8a1b920e8be367aeaad035753d21bb69b3c50515afb41ab1eefbb886b5" dependencies = [ "bumpalo", "num-bigint", @@ -1827,15 +1840,16 @@ dependencies = [ [[package]] name = "eszip" -version = "0.41.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a808622e30489ade8c36b6e706bc819c4c75420b5aca4d1a6b996bea0d995aef" +checksum = "9034d1749b91ac4fab0ed7b1d849f9f933099d1c1d021d42d6f54dd265b27d83" dependencies = [ "anyhow", "base64 0.21.0", "deno_ast", "deno_graph", "futures", + "hashlink", "serde", "serde_json", "sha2", @@ -2241,17 +2255,23 @@ name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash", + "ahash 0.8.3", ] [[package]] name = "hashlink" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa" +checksum = "0761a1b9491c4f2e3d66aa0f62d0fba0af9a0e2852e4d48ea506632a4b56e6aa" dependencies = [ - "hashbrown", + "hashbrown 0.13.2", ] [[package]] @@ -2455,20 +2475,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" -[[package]] -name = "import_map" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64dbcf9b111359e69cf9a0004e9d1c9f6697ea620d378006e9452f5e54267e45" -dependencies = [ - "cfg-if", - "indexmap", - "log", - "serde", - "serde_json", - "url", -] - [[package]] name = "import_map" version = "0.15.0" @@ -2490,7 +2496,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.3", "serde", ] @@ -3670,9 +3676,9 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" +checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" dependencies = [ "bitflags 1.3.2", "memchr", @@ -4551,9 +4557,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "swc_atoms" -version = "0.5.3" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "593c2f3e4cea60ddc4179ed731cabebe7eacec209d9e76a3bbcff4b2b020e3f5" +checksum = 
"93d0307dc4bfd107d49c7528350c372758cfca94fb503629b9a056e6a1572860" dependencies = [ "once_cell", "rustc-hash", @@ -4565,11 +4571,11 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.213.23" +version = "0.214.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6153a93eeb264274dfdf6aff3d73fdd098a5b9ef85f85241bdbd8e4149afdcb7" +checksum = "503d03f4eb44ae6a57154e4459d9a7050c56e0aa9ec798a93d21ca07a8dd0409" dependencies = [ - "ahash", + "ahash 0.7.6", "anyhow", "crc", "indexmap", @@ -4596,11 +4602,11 @@ dependencies = [ [[package]] name = "swc_common" -version = "0.31.4" +version = "0.31.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b557014d62318e08070c2a3d5eb0278ff73749dd69db53c39a4de4bcd301d6a" +checksum = "19c774005489d2907fb67909cf42af926e72edee1366512777c605ba2ef19c94" dependencies = [ - "ahash", + "ahash 0.7.6", "ast_node", "better_scoped_tls", "cfg-if", @@ -4649,9 +4655,9 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "0.103.4" +version = "0.104.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5206233430a6763e2759da76cfc596a64250793f70cd94cace1f82fdcc4d702c" +checksum = "b5cf9dd351d0c285dcd36535267953a18995d4dda0cbe34ac9d1df61aa415b26" dependencies = [ "bitflags 2.1.0", "is-macro", @@ -4666,9 +4672,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "0.138.11" +version = "0.139.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf45c899625d5132f2993a464a79f2ec7c79854b74fd3c55d1408b76d7d7750c" +checksum = "11c6af8e6d6714ecd7ef5cfba322aa1b436f78d9a82b0c3ff16aeaf97b65cd6d" dependencies = [ "memchr", "num-bigint", @@ -4698,9 +4704,9 @@ dependencies = [ [[package]] name = "swc_ecma_dep_graph" -version = "0.105.10" +version = "0.106.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92813e2f77cdf4ad870f0474eee6574f4aba10504dd3730e694d03684a7a68ab" +checksum = 
"f3130754aba396ad8d849f3fd1a9949d71a33c0943dfd86b23ac595e81fabd0c" dependencies = [ "swc_atoms", "swc_common", @@ -4710,11 +4716,11 @@ dependencies = [ [[package]] name = "swc_ecma_loader" -version = "0.43.6" +version = "0.43.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d985c6e7111fef3c0103b0414db0d792cb04b492601c94ccae2d494ffdf764" +checksum = "fe45f1e5dcc1b005544ff78253b787dea5dfd5e2f712b133964cdc3545c954a4" dependencies = [ - "ahash", + "ahash 0.7.6", "anyhow", "pathdiff", "serde", @@ -4724,9 +4730,9 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "0.133.10" +version = "0.134.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce724a8fdc90548d882dec3b0288c0698059ce12a59bbfdeea0384f3d52f009" +checksum = "f0a3fcfe3d83dd445cbd9321882e47b467594433d9a21c4d6c37a27f534bb89e" dependencies = [ "either", "lexical", @@ -4744,9 +4750,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_base" -version = "0.126.13" +version = "0.127.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c4236f8b9bea9d3d43cacab34b6e3c925c3f12585382b8f661cb994b987b688" +checksum = "6232e641bef05c462bc7da34a3771f9b3f1f3352349ae0cd72b8eee8b0f5d5e0" dependencies = [ "better_scoped_tls", "bitflags 2.1.0", @@ -4767,9 +4773,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_classes" -version = "0.115.13" +version = "0.116.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd5b13763feba98586887a92801603c413897805c70ed82e49e4acc1f90683c2" +checksum = "f086829a3e645382f5609c9c6dce1d29e5204b3c81f82fe8d65d3bf17bcca68b" dependencies = [ "swc_atoms", "swc_common", @@ -4794,11 +4800,11 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_optimization" -version = "0.186.20" +version = "0.187.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456966f04224d2125551e0e35c164abe45183cbdd5238753294343814be102d3" 
+checksum = "8d27c12926427f235d149e60f9a9e67a2181fe1eb418c12b53b8e0778c5052a2" dependencies = [ - "ahash", + "ahash 0.7.6", "dashmap", "indexmap", "once_cell", @@ -4819,9 +4825,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_proposal" -version = "0.160.16" +version = "0.161.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d21de731e3ff1ea451ac8c377a7130ebf6dbf6ffd18e744c15f86e685e0abd9a" +checksum = "416fbb84f84644ef0e81df80bf44fd575bbb297a78887e359e16a61f6dc5af86" dependencies = [ "either", "rustc-hash", @@ -4839,11 +4845,11 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_react" -version = "0.172.19" +version = "0.173.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0df18263e6c0804a1a08abd29e87af763dce1bec4b500497a0b62c22df07b2d" +checksum = "d39a0de45fa34ee797a1c80497c8b9dcb6cf6e56b455c163453399894c58a812" dependencies = [ - "ahash", + "ahash 0.7.6", "base64 0.13.1", "dashmap", "indexmap", @@ -4864,9 +4870,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_typescript" -version = "0.176.19" +version = "0.177.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1a3f356bc2b902c13fc1e39bb66c10f350c46bfe93bae5c05402863d94bd307" +checksum = "340cc027a6e87966715005b94e3a7ac95baf76c80b8aedad8afdd1c134740c80" dependencies = [ "serde", "swc_atoms", @@ -4880,9 +4886,9 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "0.116.10" +version = "0.117.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b462a1b6fc788ee956479adcbb05c282cb142a66a3b016b571fff0538a381196" +checksum = "ad791bbfdafcebd878584021e050964c8ab68aba7eeac9d0ee4afba4c284a629" dependencies = [ "indexmap", "num_cpus", @@ -4898,9 +4904,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "0.89.4" +version = "0.90.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ecb23a4a1d77997f54e9b3a4e68d1441e5e8a25ad1a476bbb3b5a620d6562a86" +checksum = "6ce3ac941ae1d6c7e683aa375fc71fbf58df58b441f614d757fbb10554936ca2" dependencies = [ "num-bigint", "swc_atoms", @@ -4924,9 +4930,9 @@ dependencies = [ [[package]] name = "swc_fast_graph" -version = "0.19.4" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "992a92e087f7b2dc9aa626a6bee26530abbffba3572adf3894ccb55d2480f596" +checksum = "6291149aec4ba55076fd54a12ceb84cac1f703b2f571c3b2f19aa66ab9ec3009" dependencies = [ "indexmap", "petgraph", @@ -4936,11 +4942,11 @@ dependencies = [ [[package]] name = "swc_graph_analyzer" -version = "0.20.5" +version = "0.20.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e02ee852ffd7eb1ee42c081b615c2fb40a2876c4631637486207f493d806c6" +checksum = "6575adec8b200801d429ffa79166224a6e298292a1b307750f4763aec5aa16c3" dependencies = [ - "ahash", + "ahash 0.7.6", "auto_impl", "petgraph", "swc_fast_graph", @@ -4961,9 +4967,9 @@ dependencies = [ [[package]] name = "swc_visit" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1d5999f23421c8e21a0f2bc53a0b9e8244f3b421de89471561af2fbe40b9cca" +checksum = "5f412dd4fbc58f509a04e64f5c8038333142fc139e8232f01b883db0094b3b51" dependencies = [ "either", "swc_visit_macros", @@ -4971,9 +4977,9 @@ dependencies = [ [[package]] name = "swc_visit_macros" -version = "0.5.6" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebeed7eb0f545f48ad30f5aab314e5208b735bcea1d1464f26e20f06db904989" +checksum = "4cfc226380ba54a5feed2c12f3ccd33f1ae8e959160290e5d2d9b4e918b6472a" dependencies = [ "Inflector", "pmutil", diff --git a/Cargo.toml b/Cargo.toml index a2a0f73a1b..e235697569 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] v8 = { version = "0.73.0", 
default-features = false } -deno_ast = { version = "0.26.0", features = ["transpiling"] } +deno_ast = { version = "0.27.0", features = ["transpiling"] } deno_core = { version = "0.189.0", path = "./core" } deno_ops = { version = "0.67.0", path = "./ops" } diff --git a/cli/Cargo.toml b/cli/Cargo.toml index df045a754a..c5a5ee677d 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -42,15 +42,16 @@ winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } -deno_doc = "0.62.0" -deno_emit = "0.20.0" -deno_graph = "=0.48.1" -deno_lint = { version = "0.46.0", features = ["docs"] } +deno_doc = "=0.63.1" +deno_emit = "=0.24.0" +deno_graph = "=0.49.0" +deno_lint = { version = "=0.47.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] } deno_semver.workspace = true -deno_task_shell = "0.12.0" +deno_task_shell = "=0.12.0" +eszip = "=0.43.0" napi_sym.workspace = true async-trait.workspace = true @@ -65,12 +66,11 @@ clap_complete_fig = "=4.1.2" console_static_text.workspace = true data-url.workspace = true dissimilar = "=1.0.4" -dprint-plugin-json = "=0.17.2" -dprint-plugin-markdown = "=0.15.2" -dprint-plugin-typescript = "=0.84.4" +dprint-plugin-json = "=0.17.3" +dprint-plugin-markdown = "=0.15.3" +dprint-plugin-typescript = "=0.85.0" encoding_rs.workspace = true env_logger = "=0.9.0" -eszip = "=0.41.0" fancy-regex = "=0.10.0" fastwebsockets.workspace = true flate2.workspace = true diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 1ecb48cd7c..5536f32fca 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -201,6 +201,9 @@ impl ModuleLoader for 
EmbeddedModuleLoader { match module.kind { eszip::ModuleKind::JavaScript => ModuleType::JavaScript, eszip::ModuleKind::Json => ModuleType::Json, + eszip::ModuleKind::Jsonc => { + return Err(type_error("jsonc modules not supported")) + } }, code, &module_specifier, diff --git a/cli/tests/testdata/coverage/complex_expected.lcov b/cli/tests/testdata/coverage/complex_expected.lcov index cfa40b2323..94b86465ae 100644 --- a/cli/tests/testdata/coverage/complex_expected.lcov +++ b/cli/tests/testdata/coverage/complex_expected.lcov @@ -20,7 +20,6 @@ DA:17,2 DA:18,2 DA:19,2 DA:20,2 -DA:21,2 DA:22,2 DA:23,2 DA:24,2 @@ -34,7 +33,6 @@ DA:32,1 DA:33,1 DA:34,1 DA:35,1 -DA:36,1 DA:37,2 DA:38,2 DA:39,2 @@ -46,7 +44,6 @@ DA:46,0 DA:47,0 DA:48,0 DA:49,0 -DA:50,0 DA:51,0 DA:52,0 DA:53,0 @@ -65,6 +62,6 @@ DA:70,1 DA:71,0 DA:73,1 DA:74,1 -LH:39 -LF:54 +LH:37 +LF:51 end_of_record diff --git a/cli/tests/testdata/coverage/complex_expected.out b/cli/tests/testdata/coverage/complex_expected.out index b1910af607..3d5f6a0ab6 100644 --- a/cli/tests/testdata/coverage/complex_expected.out +++ b/cli/tests/testdata/coverage/complex_expected.out @@ -1,9 +1,9 @@ -cover [WILDCARD]/coverage/complex.ts ... 72.222% (39/54) +cover [WILDCARD]/coverage/complex.ts ... 
72.549% (37/51) 46 | export function unused( 47 | foo: string, 48 | bar: string, 49 | baz: string, - 50 | ): Complex { +-----|----- 51 | return complex( 52 | foo, 53 | bar, diff --git a/third_party b/third_party index 7882a6c776..fd270b7927 160000 --- a/third_party +++ b/third_party @@ -1 +1 @@ -Subproject commit 7882a6c7763efbda55318e019b239bec7704765f +Subproject commit fd270b79276bb2bed365f3fb2e4ba6acaff3234b From 5c55f2b4fb9f386d5589e4cbd4c513ecb1bae50b Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 5 Jun 2023 20:35:39 -0400 Subject: [PATCH 304/320] chore: upgrade to Rust 1.70.0 (#19345) Co-authored-by: linbingquan <695601626@qq.com> --- .github/workflows/ci.generate.ts | 20 +++++++-------- .github/workflows/ci.yml | 24 +++++++++--------- Cargo.lock | 8 +++--- cli/lsp/capabilities.rs | 10 ++++---- cli/tools/upgrade.rs | 2 +- ext/ffi/Cargo.toml | 2 +- ext/net/lib.rs | 12 ++++----- ext/net/ops.rs | 42 +++++++++----------------------- ext/web/lib.rs | 2 +- runtime/permissions/mod.rs | 5 ++-- rust-toolchain.toml | 2 +- 11 files changed, 56 insertions(+), 73 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index cb32e6415d..59bada4fc4 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. 
-const cacheVersion = 33; +const cacheVersion = 34; const Runners = (() => { const ubuntuRunner = "ubuntu-22.04"; @@ -24,16 +24,16 @@ const prCacheKeyPrefix = `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.profile }}-\${{ matrix.job }}-`; const installPkgsCommand = - "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; + "sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16"; const sysRootStep = { name: "Set up incremental LTO and sysroot build", run: `# Avoid running man-db triggers, which sometimes takes several minutes # to complete. sudo apt-get remove --purge -y man-db -# Install clang-15, lld-15, and debootstrap. -echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main" | - sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-15.list +# Install clang-16, lld-16, and debootstrap. +echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main" | + sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-16.list curl https://apt.llvm.org/llvm-snapshot.gpg.key | gpg --dearmor | sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg @@ -70,8 +70,8 @@ CARGO_PROFILE_RELEASE_INCREMENTAL=false CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-15 - -C link-arg=-fuse-ld=lld-15 + -C linker=clang-16 + -C link-arg=-fuse-ld=lld-16 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -81,8 +81,8 @@ RUSTFLAGS<<__1 __1 RUSTDOCFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-15 - -C link-arg=-fuse-ld=lld-15 + -C linker=clang-16 + -C link-arg=-fuse-ld=lld-16 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -90,7 +90,7 @@ RUSTDOCFLAGS<<__1 -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m \${{ env.RUSTFLAGS }} __1 -CC=clang-15 +CC=clang-16 CFLAGS=-flto=thin --sysroot=/sysroot __0`, }; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4bb3ee964d..3ac0c2e243 
100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -210,15 +210,15 @@ jobs: # to complete. sudo apt-get remove --purge -y man-db - # Install clang-15, lld-15, and debootstrap. - echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main" | - sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-15.list + # Install clang-16, lld-16, and debootstrap. + echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main" | + sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-16.list curl https://apt.llvm.org/llvm-snapshot.gpg.key | gpg --dearmor | sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg sudo apt-get update # this was unreliable sometimes, so try again if it fails - sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15 + sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 # Create ubuntu-16.04 sysroot environment, which is used to avoid # depending on a very recent version of glibc. 
@@ -249,8 +249,8 @@ jobs: CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-15 - -C link-arg=-fuse-ld=lld-15 + -C linker=clang-16 + -C link-arg=-fuse-ld=lld-16 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -260,8 +260,8 @@ jobs: __1 RUSTDOCFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-15 - -C link-arg=-fuse-ld=lld-15 + -C linker=clang-16 + -C link-arg=-fuse-ld=lld-16 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -269,7 +269,7 @@ jobs: -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m ${{ env.RUSTFLAGS }} __1 - CC=clang-15 + CC=clang-16 CFLAGS=-flto=thin --sysroot=/sysroot __0 - name: Log versions @@ -293,7 +293,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '33-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '34-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) uses: actions/cache/restore@v3 @@ -305,7 +305,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '33-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '34-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -589,7 +589,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '33-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '34-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index 4bab387ffc..f0721f433c 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -2787,9 +2787,9 @@ checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libffi" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cb06d5b4c428f3cd682943741c39ed4157ae989fffe1094a08eaf7c4014cf60" +checksum = "ce826c243048e3d5cec441799724de52e2d42f820468431fc3fceee2341871e2" dependencies = [ "libc", "libffi-sys", @@ -2797,9 +2797,9 @@ dependencies = [ [[package]] name = "libffi-sys" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11c6f11e063a27ffe040a9d15f0b661bf41edc2383b7ae0e0ad5a7e7d53d9da3" +checksum = "f36115160c57e8529781b4183c2bb51fdc1f6d6d1ed345591d84be7703befb3c" dependencies = [ "cc", ] diff --git a/cli/lsp/capabilities.rs b/cli/lsp/capabilities.rs index e56aa6b873..355f7b6c51 100644 --- a/cli/lsp/capabilities.rs +++ b/cli/lsp/capabilities.rs @@ -1,10 +1,10 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. -///! -///! Provides information about what capabilities that are supported by the -///! language server, which helps determine what messages are sent from the -///! client. -///! +//! +//! Provides information about what capabilities that are supported by the +//! language server, which helps determine what messages are sent from the +//! client. +//! 
use deno_core::serde_json::json; use tower_lsp::lsp_types::*; diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index a6bc2975bb..b371731c31 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -334,7 +334,7 @@ pub async fn upgrade( }; let current_is_most_recent = if upgrade_flags.canary { - let latest_hash = latest_version.clone(); + let latest_hash = &latest_version; crate::version::GIT_COMMIT_HASH == latest_hash } else if !crate::version::is_canary() { let current = Version::parse_standard(crate::version::deno()).unwrap(); diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 0173144775..ae62c20631 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -17,7 +17,7 @@ path = "lib.rs" deno_core.workspace = true dlopen.workspace = true dynasmrt = "1.2.3" -libffi = "3.1.0" +libffi = "3.2.0" serde.workspace = true serde-value = "0.7" serde_json = "1.0" diff --git a/ext/net/lib.rs b/ext/net/lib.rs index 912b0723ea..0e3778d5a8 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -98,12 +98,12 @@ deno_core::extension!(deno_net, ops::op_node_unstable_net_listen_udp

    , ops::op_net_recv_udp, ops::op_net_send_udp

    , - ops::op_net_join_multi_v4_udp

    , - ops::op_net_join_multi_v6_udp

    , - ops::op_net_leave_multi_v4_udp

    , - ops::op_net_leave_multi_v6_udp

    , - ops::op_net_set_multi_loopback_udp

    , - ops::op_net_set_multi_ttl_udp

    , + ops::op_net_join_multi_v4_udp, + ops::op_net_join_multi_v6_udp, + ops::op_net_leave_multi_v4_udp, + ops::op_net_leave_multi_v6_udp, + ops::op_net_set_multi_loopback_udp, + ops::op_net_set_multi_ttl_udp, ops::op_dns_resolve

    , ops::op_set_nodelay, ops::op_set_keepalive, diff --git a/ext/net/ops.rs b/ext/net/ops.rs index 2264df8679..05aa416b4b 100644 --- a/ext/net/ops.rs +++ b/ext/net/ops.rs @@ -159,15 +159,12 @@ where } #[op] -async fn op_net_join_multi_v4_udp( +async fn op_net_join_multi_v4_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: String, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -184,15 +181,12 @@ where } #[op] -async fn op_net_join_multi_v6_udp( +async fn op_net_join_multi_v6_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: u32, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -208,15 +202,12 @@ where } #[op] -async fn op_net_leave_multi_v4_udp( +async fn op_net_leave_multi_v4_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: String, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -233,15 +224,12 @@ where } #[op] -async fn op_net_leave_multi_v6_udp( +async fn op_net_leave_multi_v6_udp( state: Rc>, rid: ResourceId, address: String, multi_interface: u32, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -257,15 +245,12 @@ where } #[op] -async fn op_net_set_multi_loopback_udp( +async fn op_net_set_multi_loopback_udp( state: Rc>, rid: ResourceId, is_v4_membership: bool, loopback: bool, -) -> Result<(), AnyError> -where - NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table @@ -283,14 +268,11 @@ where } #[op] -async fn op_net_set_multi_ttl_udp( +async fn op_net_set_multi_ttl_udp( state: Rc>, rid: ResourceId, ttl: u32, -) -> Result<(), AnyError> -where 
- NP: NetPermissions + 'static, -{ +) -> Result<(), AnyError> { let resource = state .borrow_mut() .resource_table diff --git a/ext/web/lib.rs b/ext/web/lib.rs index adbc9f2622..b1e0dd5d8d 100644 --- a/ext/web/lib.rs +++ b/ext/web/lib.rs @@ -142,7 +142,7 @@ fn op_base64_atob(mut s: ByteString) -> Result { fn forgiving_base64_decode_inplace( input: &mut [u8], ) -> Result { - let error: _ = + let error = || DomExceptionInvalidCharacterError::new("Failed to decode base64"); let decoded = base64_simd::forgiving_decode_inplace(input).map_err(|_| error())?; diff --git a/runtime/permissions/mod.rs b/runtime/permissions/mod.rs index f1d0362cad..e3ddd8cf49 100644 --- a/runtime/permissions/mod.rs +++ b/runtime/permissions/mod.rs @@ -18,6 +18,7 @@ use deno_core::ModuleSpecifier; use deno_core::OpState; use log; use once_cell::sync::Lazy; +use std::borrow::Cow; use std::collections::HashSet; use std::fmt; use std::hash::Hash; @@ -872,8 +873,8 @@ impl UnaryPermission { .ok_or_else(|| uri_error("Missing host"))? .to_string(); let display_host = match url.port() { - None => hostname.clone(), - Some(port) => format!("{hostname}:{port}"), + None => Cow::Borrowed(&hostname), + Some(port) => Cow::Owned(format!("{hostname}:{port}")), }; let host = &(&hostname, url.port_or_known_default()); let (result, prompted, is_allow_all) = self.query(Some(host)).check( diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 434cca4575..f15cd1c929 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.68.2" +channel = "1.70.0" components = ["rustfmt", "clippy"] From 0bbdbace02d8b17a02bd3c631b82f508d0effa4a Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 6 Jun 2023 03:01:28 -0600 Subject: [PATCH 305/320] refactor(core): ensureFastOps is an op-generating proxy (#19377) Startup benchmark shows no changes (within 1ms, identical system/user times). 
--- core/01_core.js | 41 ++++++++----------- .../http_bench_json_ops.js | 5 +-- core/runtime.rs | 4 +- ext/fs/30_fs.js | 8 +--- ext/http/00_serve.js | 20 +-------- ext/http/01_http.js | 2 +- ext/node/polyfills/internal/crypto/random.ts | 6 +-- ext/web/02_timers.js | 5 +-- ext/websocket/01_websocket.js | 9 +--- ext/websocket/02_websocketstream.js | 8 +--- 10 files changed, 26 insertions(+), 82 deletions(-) diff --git a/core/01_core.js b/core/01_core.js index c3033fcf9a..13aa17c7ed 100644 --- a/core/01_core.js +++ b/core/01_core.js @@ -22,6 +22,7 @@ PromiseReject, PromiseResolve, PromisePrototypeThen, + Proxy, RangeError, ReferenceError, ReflectHas, @@ -762,19 +763,19 @@ for (let i = 0; i < 10; i++) { setUpAsyncStub(opName); } - function generateAsyncOpHandler(/* opNames... */) { - const fastOps = {}; - for (const opName of new SafeArrayIterator(arguments)) { - if (ops[opName] === undefined) { - throw new Error(`Unknown or disabled op '${opName}'`); - } - if (asyncOps[opName] !== undefined) { - fastOps[opName] = setUpAsyncStub(opName); - } else { - fastOps[opName] = ops[opName]; - } - } - return fastOps; + function ensureFastOps() { + return new Proxy({}, { + get(_target, opName) { + if (ops[opName] === undefined) { + throw new Error(`Unknown or disabled op '${opName}'`); + } + if (asyncOps[opName] !== undefined) { + return setUpAsyncStub(opName); + } else { + return ops[opName]; + } + }, + }); } const { @@ -787,22 +788,12 @@ for (let i = 0; i < 10; i++) { op_read_sync: readSync, op_write_sync: writeSync, op_shutdown: shutdown, - } = generateAsyncOpHandler( - "op_close", - "op_try_close", - "op_read", - "op_read_all", - "op_write", - "op_write_all", - "op_read_sync", - "op_write_sync", - "op_shutdown", - ); + } = ensureFastOps(); // Extra Deno.core.* exports const core = ObjectAssign(globalThis.Deno.core, { asyncStub, - generateAsyncOpHandler, + ensureFastOps, opAsync, resources, metrics, diff --git a/core/examples/http_bench_json_ops/http_bench_json_ops.js 
b/core/examples/http_bench_json_ops/http_bench_json_ops.js index 6cf2a8be24..a840e4e9f9 100644 --- a/core/examples/http_bench_json_ops/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops/http_bench_json_ops.js @@ -9,10 +9,7 @@ const { op_listen } = Deno.core.ops; const { op_accept, op_read_socket, -} = Deno.core.generateAsyncOpHandler( - "op_accept", - "op_read_socket", -); +} = Deno.core.ensureFastOps(); const requestBuf = new Uint8Array(64 * 1024); const responseBuf = new Uint8Array( diff --git a/core/runtime.rs b/core/runtime.rs index fdcb81e9e2..a27717a8b4 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -2852,7 +2852,7 @@ pub mod tests { .execute_script_static( "filename.js", r#" - const { op_test } = Deno.core.generateAsyncOpHandler("op_test"); + const { op_test } = Deno.core.ensureFastOps(); let zero_copy_a = new Uint8Array([0]); op_test(null, zero_copy_a); "#, @@ -4942,7 +4942,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { throw new Error(); } - const { op_test_async } = Deno.core.generateAsyncOpHandler("op_test_async"); + const { op_test_async } = Deno.core.ensureFastOps(); if (op_test_async.name !== "op_test_async") { throw new Error(); } diff --git a/ext/fs/30_fs.js b/ext/fs/30_fs.js index f14fcd5d1b..f7c07f26a7 100644 --- a/ext/fs/30_fs.js +++ b/ext/fs/30_fs.js @@ -10,13 +10,7 @@ const { op_fs_truncate_async, op_fs_link_async, op_fs_flock_async, -} = Deno.core.generateAsyncOpHandler( - "op_fs_chmod_async", - "op_fs_ftruncate_async", - "op_fs_truncate_async", - "op_fs_link_async", - "op_fs_flock_async", -); +} = Deno.core.ensureFastOps(); const primordials = globalThis.__bootstrap.primordials; const { ArrayPrototypeFilter, diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index ba8080e27b..fa55079e77 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -70,25 +70,7 @@ const { op_http_upgrade_websocket_next, op_http_try_wait, op_http_wait, -} = core.generateAsyncOpHandler( - 
"op_http_get_request_headers", - "op_http_get_request_method_and_url", - "op_http_read_request_body", - "op_http_serve", - "op_http_serve_on", - "op_http_set_promise_complete", - "op_http_set_response_body_bytes", - "op_http_set_response_body_resource", - "op_http_set_response_body_stream", - "op_http_set_response_body_text", - "op_http_set_response_header", - "op_http_set_response_headers", - "op_http_set_response_trailers", - "op_http_upgrade_raw", - "op_http_upgrade_websocket_next", - "op_http_try_wait", - "op_http_wait", -); +} = core.ensureFastOps(); const _upgraded = Symbol("_upgraded"); function internalServerError() { diff --git a/ext/http/01_http.js b/ext/http/01_http.js index 92fd8e2858..f9a8d2cdbc 100644 --- a/ext/http/01_http.js +++ b/ext/http/01_http.js @@ -6,7 +6,7 @@ const core = globalThis.Deno.core; const internals = globalThis.__bootstrap.internals; const primordials = globalThis.__bootstrap.primordials; const { BadResourcePrototype, InterruptedPrototype, ops } = core; -const { op_http_write } = Deno.core.generateAsyncOpHandler("op_http_write"); +const { op_http_write } = Deno.core.ensureFastOps(); import * as webidl from "ext:deno_webidl/00_webidl.js"; import { InnerBody } from "ext:deno_fetch/22_body.js"; import { Event, setEventTargetData } from "ext:deno_web/02_event.js"; diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts index 4890e158ad..9156ab4e1a 100644 --- a/ext/node/polyfills/internal/crypto/random.ts +++ b/ext/node/polyfills/internal/crypto/random.ts @@ -37,11 +37,7 @@ const { op_node_gen_prime_async, op_node_check_prime_bytes_async, op_node_check_prime_async, -} = Deno.core.generateAsyncOpHandler( - "op_node_gen_prime_async", - "op_node_check_prime_bytes_async", - "op_node_check_prime_async", -); +} = Deno.core.ensureFastOps(); export type LargeNumberLike = | ArrayBufferView diff --git a/ext/web/02_timers.js b/ext/web/02_timers.js index 7603f67668..19ebfaa0e4 100644 --- 
a/ext/web/02_timers.js +++ b/ext/web/02_timers.js @@ -27,10 +27,7 @@ const { import * as webidl from "ext:deno_webidl/00_webidl.js"; import { reportException } from "ext:deno_web/02_event.js"; import { assert } from "ext:deno_web/00_infra.js"; -const { op_sleep, op_void_async_deferred } = core.generateAsyncOpHandler( - "op_sleep", - "op_void_async_deferred", -); +const { op_sleep, op_void_async_deferred } = core.ensureFastOps(); const hrU8 = new Uint8Array(8); const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8)); diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index e71cae44a8..01dd265792 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -57,14 +57,7 @@ const { op_ws_send_text, op_ws_next_event, op_ws_send_ping, -} = core.generateAsyncOpHandler( - "op_ws_create", - "op_ws_close", - "op_ws_send_binary", - "op_ws_send_text", - "op_ws_next_event", - "op_ws_send_ping", -); +} = core.ensureFastOps(); webidl.converters["sequence or DOMString"] = ( V, diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 2c5df262ac..00d5bdaecf 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -39,13 +39,7 @@ const { op_ws_next_event, op_ws_create, op_ws_close, -} = core.generateAsyncOpHandler( - "op_ws_send_text", - "op_ws_send_binary", - "op_ws_next_event", - "op_ws_create", - "op_ws_close", -); +} = core.ensureFastOps(); webidl.converters.WebSocketStreamOptions = webidl.createDictionaryConverter( "WebSocketStreamOptions", From 2052ba343c0b222cf638e32f15622a237e423317 Mon Sep 17 00:00:00 2001 From: ud2 Date: Tue, 6 Jun 2023 17:06:00 +0800 Subject: [PATCH 306/320] fix(ext/console): fix inspecting large ArrayBuffers (#19373) --- cli/tests/unit/console_test.ts | 25 ++++++++++++++++++++++ ext/console/01_console.js | 39 ++++++++++++++++++++-------------- 2 files changed, 48 insertions(+), 16 deletions(-) diff --git 
a/cli/tests/unit/console_test.ts b/cli/tests/unit/console_test.ts index b177b956bb..4cedf35846 100644 --- a/cli/tests/unit/console_test.ts +++ b/cli/tests/unit/console_test.ts @@ -2193,6 +2193,31 @@ Deno.test(function inspectEmptyUint8Array() { ); }); +Deno.test(function inspectLargeArrayBuffer() { + const arrayBuffer = new ArrayBuffer(2 ** 32 + 1); + assertEquals( + Deno.inspect(arrayBuffer), + `ArrayBuffer { + [Uint8Contents]: <00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ... 4294967197 more bytes>, + byteLength: 4294967297 +}`, + ); + structuredClone(arrayBuffer, { transfer: [arrayBuffer] }); + assertEquals( + Deno.inspect(arrayBuffer), + "ArrayBuffer { (detached), byteLength: 0 }", + ); + + const sharedArrayBuffer = new SharedArrayBuffer(2 ** 32 + 1); + assertEquals( + Deno.inspect(sharedArrayBuffer), + `SharedArrayBuffer { + [Uint8Contents]: <00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ... 
4294967197 more bytes>, + byteLength: 4294967297 +}`, + ); +}); + Deno.test(function inspectStringAbbreviation() { const LONG_STRING = "This is a really long string which will be abbreviated with ellipsis."; diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 6cf3c6dcac..fbc36ca9c7 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -248,6 +248,17 @@ defineColorAlias("doubleunderline", "doubleUnderline"); // https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.bytelength let _getSharedArrayBufferByteLength; +function getSharedArrayBufferByteLength(value) { + // TODO(kt3k): add SharedArrayBuffer to primordials + _getSharedArrayBufferByteLength ??= ObjectGetOwnPropertyDescriptor( + // deno-lint-ignore prefer-primordials + SharedArrayBuffer.prototype, + "byteLength", + ).get; + + return FunctionPrototypeCall(_getSharedArrayBufferByteLength, value); +} + function isObjectLike(value) { return value !== null && typeof value === "object"; } @@ -428,15 +439,8 @@ export function isSetIterator( export function isSharedArrayBuffer( value, ) { - // TODO(kt3k): add SharedArrayBuffer to primordials - _getSharedArrayBufferByteLength ??= ObjectGetOwnPropertyDescriptor( - // deno-lint-ignore prefer-primordials - SharedArrayBuffer.prototype, - "byteLength", - ).get; - try { - FunctionPrototypeCall(_getSharedArrayBufferByteLength, value); + getSharedArrayBufferByteLength(value); return true; } catch { return false; @@ -1608,7 +1612,7 @@ const hexSliceLookupTable = function () { }(); function hexSlice(buf, start, end) { - const len = buf.length; + const len = TypedArrayPrototypeGetLength(buf); if (!start || start < 0) { start = 0; } @@ -1624,21 +1628,24 @@ function hexSlice(buf, start, end) { const arrayBufferRegExp = new SafeRegExp("(.{2})", "g"); function formatArrayBuffer(ctx, value) { + let valLen; + try { + valLen = ArrayBufferPrototypeGetByteLength(value); + } catch { + valLen = getSharedArrayBufferByteLength(value); + } + const 
len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); let buffer; try { - buffer = new Uint8Array(value); + buffer = new Uint8Array(value, 0, len); } catch { return [ctx.stylize("(detached)", "special")]; } let str = StringPrototypeTrim( - StringPrototypeReplace( - hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), - arrayBufferRegExp, - "$1 ", - ), + StringPrototypeReplace(hexSlice(buffer), arrayBufferRegExp, "$1 "), ); - const remaining = buffer.length - ctx.maxArrayLength; + const remaining = valLen - len; if (remaining > 0) { str += ` ... ${remaining} more byte${remaining > 1 ? "s" : ""}`; } From 42991017e9af59d6a5cb6b523228c62f1c32380e Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 6 Jun 2023 04:29:55 -0600 Subject: [PATCH 307/320] feat(ext/node): Very basic node:http2 support (#19344) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds basic support for "node:http2" module. Not all APIs have been yet implemented, but this change already allows to use this module for some basic functions. The "grpc" package is still not working, but it's a good stepping stone. 
--------- Co-authored-by: Bartek Iwańczuk --- cli/tests/integration/node_unit_tests.rs | 1 + cli/tests/unit_node/http2_test.ts | 104 ++++++ ext/http/00_serve.js | 8 +- ext/node/polyfills/http2.ts | 384 ++++++++++++++++++----- ext/node/polyfills/net.ts | 34 +- 5 files changed, 444 insertions(+), 87 deletions(-) create mode 100644 cli/tests/unit_node/http2_test.ts diff --git a/cli/tests/integration/node_unit_tests.rs b/cli/tests/integration/node_unit_tests.rs index f62c8761cf..363e5dfa34 100644 --- a/cli/tests/integration/node_unit_tests.rs +++ b/cli/tests/integration/node_unit_tests.rs @@ -53,6 +53,7 @@ util::unit_test_factory!( crypto_sign_test = crypto / crypto_sign_test, fs_test, http_test, + http2_test, _randomBytes_test = internal / _randomBytes_test, _randomFill_test = internal / _randomFill_test, _randomInt_test = internal / _randomInt_test, diff --git a/cli/tests/unit_node/http2_test.ts b/cli/tests/unit_node/http2_test.ts new file mode 100644 index 0000000000..543543cbdc --- /dev/null +++ b/cli/tests/unit_node/http2_test.ts @@ -0,0 +1,104 @@ +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. 
+ +import * as http2 from "node:http2"; +import * as net from "node:net"; +import { deferred } from "../../../test_util/std/async/deferred.ts"; +import { assertEquals } from "https://deno.land/std@v0.42.0/testing/asserts.ts"; + +const { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_STATUS, +} = http2.constants; + +Deno.test("[node/http2 client]", async () => { + // Create a server to respond to the HTTP2 requests + const portPromise = deferred(); + const reqPromise = deferred(); + const ready = deferred(); + const ac = new AbortController(); + const server = Deno.serve({ + port: 0, + signal: ac.signal, + onListen: ({ port }: { port: number }) => portPromise.resolve(port), + handler: async (req: Request) => { + reqPromise.resolve(req); + await ready; + return new Response("body", { + status: 401, + headers: { "resp-header-name": "resp-header-value" }, + }); + }, + }); + + const port = await portPromise; + + // Get a session + const sessionPromise = deferred(); + const session = http2.connect( + `localhost:${port}`, + {}, + sessionPromise.resolve.bind(sessionPromise), + ); + const session2 = await sessionPromise; + assertEquals(session, session2); + + // Write a request, including a body + const stream = session.request({ + [HTTP2_HEADER_AUTHORITY]: `localhost:${port}`, + [HTTP2_HEADER_METHOD]: "POST", + [HTTP2_HEADER_PATH]: "/path", + "req-header-name": "req-header-value", + }); + stream.write("body"); + stream.end(); + + // Check the request + const req = await reqPromise; + assertEquals(req.headers.get("req-header-name"), "req-header-value"); + assertEquals(await req.text(), "body"); + + ready.resolve(); + + // Read a response + const headerPromise = new Promise>(( + resolve, + ) => stream.on("headers", resolve)); + const headers = await headerPromise; + assertEquals(headers["resp-header-name"], "resp-header-value"); + assertEquals(headers[HTTP2_HEADER_STATUS], "401"); + + ac.abort(); + await server.finished; +}); + 
+Deno.test("[node/http2 server]", async () => { + const server = http2.createServer(); + server.listen(0); + const port = ( server.address()).port; + const sessionPromise = new Promise((resolve) => + server.on("session", resolve) + ); + + const responsePromise = fetch(`http://localhost:${port}/path`, { + method: "POST", + body: "body", + }); + + const session = await sessionPromise; + const stream = await new Promise((resolve) => + session.on("stream", resolve) + ); + const _headers = await new Promise((resolve) => + stream.on("headers", resolve) + ); + const _data = await new Promise((resolve) => stream.on("data", resolve)); + const _end = await new Promise((resolve) => stream.on("end", resolve)); + stream.respond(); + stream.end(); + const resp = await responsePromise; + await resp.text(); + + await new Promise((resolve) => server.close(resolve)); +}); diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index fa55079e77..c5a5c0e189 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -748,4 +748,10 @@ internals.upgradeHttpRaw = upgradeHttpRaw; internals.serveHttpOnListener = serveHttpOnListener; internals.serveHttpOnConnection = serveHttpOnConnection; -export { serve, upgradeHttpRaw }; +export { + addTrailers, + serve, + serveHttpOnConnection, + serveHttpOnListener, + upgradeHttpRaw, +}; diff --git a/ext/node/polyfills/http2.ts b/ext/node/polyfills/http2.ts index 90b1be1a2f..a5d945efea 100644 --- a/ext/node/polyfills/http2.ts +++ b/ext/node/polyfills/http2.ts @@ -1,12 +1,21 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. 
-import { notImplemented } from "ext:deno_node/_utils.ts"; +import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts"; import { EventEmitter } from "ext:deno_node/events.ts"; import { Buffer } from "ext:deno_node/buffer.ts"; -import { Socket } from "ext:deno_node/net.ts"; +import { Server, Socket, TCP } from "ext:deno_node/net.ts"; import { TypedArray } from "ext:deno_node/internal/util/types.ts"; +import { setStreamTimeout } from "ext:deno_node/internal/stream_base_commons.ts"; import { FileHandle } from "ext:deno_node/fs/promises.ts"; +import { kStreamBaseField } from "ext:deno_node/internal_binding/stream_wrap.ts"; +import { addTrailers, serveHttpOnConnection } from "ext:deno_http/00_serve.js"; +import { type Deferred, deferred } from "ext:deno_node/_util/async.ts"; +import { nextTick } from "ext:deno_node/_next_tick.ts"; +import { TextEncoder } from "ext:deno_web/08_text_encoding.js"; + +const ENCODER = new TextEncoder(); +type Http2Headers = Record; export class Http2Session extends EventEmitter { constructor() { @@ -19,11 +28,10 @@ export class Http2Session extends EventEmitter { } close(_callback?: () => void) { - notImplemented("Http2Session.close"); + warnNotImplemented("Http2Session.close"); } get closed(): boolean { - notImplemented("Http2Session.closed"); return false; } @@ -37,7 +45,6 @@ export class Http2Session extends EventEmitter { } get destroyed(): boolean { - notImplemented("Http2Session.destroyed"); return false; } @@ -78,7 +85,7 @@ export class Http2Session extends EventEmitter { } ref() { - notImplemented("Http2Session.ref"); + warnNotImplemented("Http2Session.ref"); } get remoteSettings(): Record { @@ -90,17 +97,15 @@ export class Http2Session extends EventEmitter { notImplemented("Http2Session.setLocalWindowSize"); } - setTimeout(_msecs: number, _callback: () => void) { - notImplemented("Http2Session.setTimeout"); + setTimeout(msecs: number, callback?: () => void) { + setStreamTimeout(this, msecs, callback); } get 
socket(): Socket /*| TlsSocket*/ { - notImplemented("Http2Session.socket"); - return null; + return {}; } get state(): Record { - notImplemented("Http2Session.state"); return {}; } @@ -114,7 +119,7 @@ export class Http2Session extends EventEmitter { } unref() { - notImplemented("Http2Session.unref"); + warnNotImplemented("Http2Session.unref"); } } @@ -136,21 +141,131 @@ export class ServerHttp2Session extends Http2Session { } export class ClientHttp2Session extends Http2Session { - constructor() { + constructor( + _authority: string | URL, + _options: Record, + callback: (session: Http2Session) => void, + ) { super(); + if (callback) { + this.on("connect", callback); + } + nextTick(() => this.emit("connect", this)); } request( - _headers: Record, + headers: Http2Headers, _options?: Record, ): ClientHttp2Stream { - notImplemented("ClientHttp2Session.request"); - return new ClientHttp2Stream(); + const reqHeaders: string[][] = []; + const controllerPromise: Deferred< + ReadableStreamDefaultController + > = deferred(); + const body = new ReadableStream({ + start(controller) { + controllerPromise.resolve(controller); + }, + }); + const request: RequestInit = { headers: reqHeaders, body }; + let authority = null; + let path = null; + for (const [name, value] of Object.entries(headers)) { + if (name == constants.HTTP2_HEADER_PATH) { + path = String(value); + } else if (name == constants.HTTP2_HEADER_METHOD) { + request.method = String(value); + } else if (name == constants.HTTP2_HEADER_AUTHORITY) { + authority = String(value); + } else { + reqHeaders.push([name, String(value)]); + } + } + + const fetchPromise = fetch(`http://${authority}${path}`, request); + const readerPromise = deferred(); + const headersPromise = deferred(); + (async () => { + const fetch = await fetchPromise; + readerPromise.resolve(fetch.body); + + const headers: Http2Headers = {}; + for (const [key, value] of fetch.headers) { + headers[key] = value; + } + headers[constants.HTTP2_HEADER_STATUS] = 
String(fetch.status); + + headersPromise.resolve(headers); + })(); + return new ClientHttp2Stream( + this, + headersPromise, + controllerPromise, + readerPromise, + ); } } -export class Http2Stream { - constructor() { +export class Http2Stream extends EventEmitter { + #session: Http2Session; + #headers: Deferred; + #controllerPromise: Deferred>; + #readerPromise: Deferred>; + #closed: boolean; + _response: Response; + + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Promise>, + readerPromise: Promise>, + ) { + super(); + this.#session = session; + this.#headers = headers; + this.#controllerPromise = controllerPromise; + this.#readerPromise = readerPromise; + this.#closed = false; + nextTick(() => { + (async () => { + const headers = await this.#headers; + this.emit("headers", headers); + })(); + (async () => { + const reader = await this.#readerPromise; + if (reader) { + for await (const data of reader) { + this.emit("data", new Buffer(data)); + } + } + this.emit("end"); + })(); + }); + } + + // TODO(mmastrac): Implement duplex + end() { + (async () => { + const controller = await this.#controllerPromise; + controller.close(); + })(); + } + + write(buffer, callback?: () => void) { + (async () => { + const controller = await this.#controllerPromise; + if (typeof buffer === "string") { + controller.enqueue(ENCODER.encode(buffer)); + } else { + controller.enqueue(buffer); + } + callback?.(); + })(); + } + + resume() { + } + + pause() { } get aborted(): boolean { @@ -164,16 +279,15 @@ export class Http2Stream { } close(_code: number, _callback: () => void) { - notImplemented("Http2Stream.close"); + this.#closed = true; + this.emit("close"); } get closed(): boolean { - notImplemented("Http2Stream.closed"); - return false; + return this.#closed; } get destroyed(): boolean { - notImplemented("Http2Stream.destroyed"); return false; } @@ -197,7 +311,7 @@ export class Http2Stream { } get rstCode(): number { - 
notImplemented("Http2Stream.rstCode"); + // notImplemented("Http2Stream.rstCode"); return 0; } @@ -217,12 +331,11 @@ export class Http2Stream { } get session(): Http2Session { - notImplemented("Http2Stream.session"); - return new Http2Session(); + return this.#session; } - setTimeout(_msecs: number, _callback: () => void) { - notImplemented("Http2Stream.setTimeout"); + setTimeout(msecs: number, callback?: () => void) { + setStreamTimeout(this, msecs, callback); } get state(): Record { @@ -231,28 +344,52 @@ export class Http2Stream { } sendTrailers(_headers: Record) { - notImplemented("Http2Stream.sendTrailers"); + addTrailers(this._response, [["grpc-status", "0"], ["grpc-message", "OK"]]); } } export class ClientHttp2Stream extends Http2Stream { - constructor() { - super(); + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Deferred>, + readerPromise: Deferred>, + ) { + super(session, headers, controllerPromise, readerPromise); } } export class ServerHttp2Stream extends Http2Stream { - constructor() { - super(); + _promise: Deferred; + #body: ReadableStream; + #waitForTrailers: boolean; + #headersSent: boolean; + + constructor( + session: Http2Session, + headers: Promise, + controllerPromise: Promise>, + reader: ReadableStream, + body: ReadableStream, + ) { + super(session, headers, controllerPromise, Promise.resolve(reader)); + this._promise = new deferred(); + this.#body = body; } additionalHeaders(_headers: Record) { notImplemented("ServerHttp2Stream.additionalHeaders"); } + end(): void { + super.end(); + if (this.#waitForTrailers) { + this.emit("wantTrailers"); + } + } + get headersSent(): boolean { - notImplemented("ServerHttp2Stream.headersSent"); - return false; + return this.#headersSent; } get pushAllowed(): boolean { @@ -269,10 +406,26 @@ export class ServerHttp2Stream extends Http2Stream { } respond( - _headers: Record, - _options: Record, + headers: Http2Headers, + options: Record, ) { - 
notImplemented("ServerHttp2Stream.respond"); + this.#headersSent = true; + const response: ResponseInit = {}; + if (headers) { + for (const [name, value] of Object.entries(headers)) { + if (name == constants.HTTP2_HEADER_STATUS) { + response.status = Number(value); + } + } + } + if (options?.endStream) { + this._promise.resolve(this._response = new Response("", response)); + } else { + this.#waitForTrailers = options?.waitForTrailers; + this._promise.resolve( + this._response = new Response(this.#body, response), + ); + } } respondWithFD( @@ -292,56 +445,145 @@ export class ServerHttp2Stream extends Http2Stream { } } -export class Http2Server { - constructor() { +export class Http2Server extends Server { + #options: Record = {}; + #abortController; + #server; + timeout = 0; + + constructor( + options: Record, + requestListener: () => unknown, + ) { + super(options); + this.#abortController = new AbortController(); + this.on( + "connection", + (conn: Deno.Conn) => { + try { + const session = new ServerHttp2Session(); + this.emit("session", session); + this.#server = serveHttpOnConnection( + conn, + this.#abortController.signal, + async (req: Request) => { + try { + const controllerPromise: Deferred< + ReadableStreamDefaultController + > = deferred(); + const body = new ReadableStream({ + start(controller) { + controllerPromise.resolve(controller); + }, + }); + const headers: Http2Headers = {}; + for (const [name, value] of req.headers) { + headers[name] = value; + } + headers[constants.HTTP2_HEADER_PATH] = + new URL(req.url).pathname; + const stream = new ServerHttp2Stream( + session, + Promise.resolve(headers), + controllerPromise, + req.body, + body, + ); + session.emit("stream", stream, headers); + this.emit("stream", stream, headers); + return await stream._promise; + } catch (e) { + console.log("Error in serveHttpOnConnection", e); + } + return new Response(""); + }, + () => { + console.log("error"); + }, + () => {}, + ); + } catch (e) { + console.log("Error in 
Http2Server", e); + } + }, + ); + this.on( + "newListener", + (event) => console.log(`Event in newListener: ${event}`), + ); + this.#options = options; + if (typeof requestListener === "function") { + this.on("request", requestListener); + } } - close(_callback?: () => unknown) { - notImplemented("Http2Server.close"); + // Prevent the TCP server from wrapping this in a socket, since we need it to serve HTTP + _createSocket(clientHandle: TCP) { + return clientHandle[kStreamBaseField]; } - setTimeout(_msecs: number, _callback?: () => unknown) { - notImplemented("Http2Server.setTimeout"); + close(callback?: () => unknown) { + if (callback) { + this.on("close", callback); + } + this.#abortController.abort(); + super.close(); } - get timeout(): number { - notImplemented("Http2Server.timeout"); - return 0; + setTimeout(msecs: number, callback?: () => unknown) { + this.timeout = msecs; + if (callback !== undefined) { + this.on("timeout", callback); + } } - updateSettings(_settings: Record) { - notImplemented("Http2Server.updateSettings"); + updateSettings(settings: Record) { + this.#options.settings = { ...this.#options.settings, ...settings }; } } -export class Http2SecureServer { - constructor() { +export class Http2SecureServer extends Server { + #options: Record = {}; + timeout = 0; + + constructor( + options: Record, + requestListener: () => unknown, + ) { + super(options, function () { + notImplemented("connectionListener"); + }); + this.#options = options; + if (typeof requestListener === "function") { + this.on("request", requestListener); + } } close(_callback?: () => unknown) { notImplemented("Http2SecureServer.close"); } - setTimeout(_msecs: number, _callback?: () => unknown) { - notImplemented("Http2SecureServer.setTimeout"); + setTimeout(msecs: number, callback?: () => unknown) { + this.timeout = msecs; + if (callback !== undefined) { + this.on("timeout", callback); + } } - get timeout(): number { - notImplemented("Http2SecureServer.timeout"); - return 0; - } 
- - updateSettings(_settings: Record) { - notImplemented("Http2SecureServer.updateSettings"); + updateSettings(settings: Record) { + this.#options.settings = { ...this.#options.settings, ...settings }; } } export function createServer( - _options: Record, - _onRequestHandler: () => unknown, + options: Record, + onRequestHandler: () => unknown, ): Http2Server { - notImplemented("http2.createServer"); - return new Http2Server(); + if (typeof options === "function") { + onRequestHandler = options; + options = {}; + } + return new Http2Server(options, onRequestHandler); } export function createSecureServer( @@ -353,11 +595,11 @@ export function createSecureServer( } export function connect( - _authority: string | URL, - _options: Record, + authority: string | URL, + options: Record, + callback: (session: ClientHttp2Session) => void, ): ClientHttp2Session { - notImplemented("http2.connect"); - return new ClientHttp2Session(); + return new ClientHttp2Session(authority, options, callback); } export const constants = { @@ -681,8 +923,8 @@ export class Http2ServerRequest { return ""; } - setTimeout(_msecs: number, _callback: () => unknown) { - notImplemented("Http2ServerRequest.setTimeout"); + setTimeout(msecs: number, callback?: () => unknown) { + this.stream.setTimeout(callback, msecs); } get socket(): Socket /*| TlsSocket*/ { @@ -781,8 +1023,8 @@ export class Http2ServerResponse { notImplemented("Http2ServerResponse.setHeader"); } - setTimeout(_msecs: number, _callback: () => unknown) { - notImplemented("Http2ServerResponse.setTimeout"); + setTimeout(msecs: number, callback?: () => unknown) { + this.stream.setTimeout(msecs, callback); } get socket(): Socket /*| TlsSocket*/ { diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts index 2c2f5f9448..79845adb2e 100644 --- a/ext/node/polyfills/net.ts +++ b/ext/node/polyfills/net.ts @@ -1834,21 +1834,8 @@ function _onconnection(this: any, err: number, clientHandle?: Handle) { return; } - const socket = new Socket({ 
- handle: clientHandle, - allowHalfOpen: self.allowHalfOpen, - pauseOnCreate: self.pauseOnConnect, - readable: true, - writable: true, - }); - - // TODO(@bartlomieju): implement noDelay and setKeepAlive - - self._connections++; - socket.server = self; - socket._server = self; - - DTRACE_NET_SERVER_CONNECTION(socket); + const socket = self._createSocket(clientHandle); + this._connections++; self.emit("connection", socket); if (netServerSocketChannel.hasSubscribers) { @@ -2369,6 +2356,23 @@ export class Server extends EventEmitter { return !!this._handle; } + _createSocket(clientHandle) { + const socket = new Socket({ + handle: clientHandle, + allowHalfOpen: this.allowHalfOpen, + pauseOnCreate: this.pauseOnConnect, + readable: true, + writable: true, + }); + + // TODO(@bartlomieju): implement noDelay and setKeepAlive + + socket.server = this; + socket._server = this; + + DTRACE_NET_SERVER_CONNECTION(socket); + } + _listen2 = _setupListenHandle; _emitCloseIfDrained() { From df76a062fa93899a614194a34f7b7c281f9dbac5 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 6 Jun 2023 07:58:18 -0600 Subject: [PATCH 308/320] perf(ext/websocket): Make send sync for non-stream websockets (#19376) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit No need to go through the async machinery for `send(String | Buffer)` -- we can fire and forget, and then route any send errors into the async call we're already making (`op_ws_next_event`). 
Early benchmark on MacOS: Before: 155.8k msg/sec After: 166.2k msg/sec (+6.6%) Co-authored-by: Bartek Iwańczuk --- ext/websocket/01_websocket.js | 56 +++++------------- ext/websocket/02_websocketstream.js | 8 +-- ext/websocket/lib.rs | 88 +++++++++++++++++++++++++---- 3 files changed, 93 insertions(+), 59 deletions(-) diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index 01dd265792..a38af036aa 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -23,12 +23,10 @@ const primordials = globalThis.__bootstrap.primordials; const { ArrayBufferPrototype, ArrayBufferIsView, - ArrayBufferPrototypeGetByteLength, ArrayPrototypeJoin, ArrayPrototypeMap, ArrayPrototypeSome, DataView, - DataViewPrototypeGetByteLength, ErrorPrototypeToString, ObjectDefineProperties, ObjectPrototypeIsPrototypeOf, @@ -46,7 +44,6 @@ const { PromisePrototypeCatch, SymbolFor, TypedArrayPrototypeGetByteLength, - TypedArrayPrototypeGetSymbolToStringTag, } = primordials; const op_ws_check_permission_and_cancel_handle = core.ops.op_ws_check_permission_and_cancel_handle; @@ -57,6 +54,7 @@ const { op_ws_send_text, op_ws_next_event, op_ws_send_ping, + op_ws_get_buffered_amount, } = core.ensureFastOps(); webidl.converters["sequence or DOMString"] = ( @@ -111,7 +109,6 @@ const _role = Symbol("[[role]]"); const _extensions = Symbol("[[extensions]]"); const _protocol = Symbol("[[protocol]]"); const _binaryType = Symbol("[[binaryType]]"); -const _bufferedAmount = Symbol("[[bufferedAmount]]"); const _eventLoop = Symbol("[[eventLoop]]"); const _server = Symbol("[[server]]"); @@ -179,10 +176,13 @@ class WebSocket extends EventTarget { } } - [_bufferedAmount] = 0; get bufferedAmount() { webidl.assertBranded(this, WebSocketPrototype); - return this[_bufferedAmount]; + if (this[_readyState] === OPEN) { + return op_ws_get_buffered_amount(this[_rid]); + } else { + return 0; + } } constructor(url, protocols = []) { @@ -318,55 +318,25 @@ class WebSocket extends 
EventTarget { throw new DOMException("readyState not OPEN", "InvalidStateError"); } - /** - * @param {ArrayBufferView} view - * @param {number} byteLength - */ - const sendTypedArray = (view, byteLength) => { - this[_bufferedAmount] += byteLength; - PromisePrototypeThen( - op_ws_send_binary( - this[_rid], - view, - ), - () => { - this[_bufferedAmount] -= byteLength; - }, - ); - }; - if (ObjectPrototypeIsPrototypeOf(BlobPrototype, data)) { PromisePrototypeThen( // deno-lint-ignore prefer-primordials data.slice().arrayBuffer(), (ab) => - sendTypedArray( + op_ws_send_binary( + this[_rid], new DataView(ab), - ArrayBufferPrototypeGetByteLength(ab), ), ); } else if (ArrayBufferIsView(data)) { - if (TypedArrayPrototypeGetSymbolToStringTag(data) === undefined) { - // DataView - sendTypedArray(data, DataViewPrototypeGetByteLength(data)); - } else { - // TypedArray - sendTypedArray(data, TypedArrayPrototypeGetByteLength(data)); - } + op_ws_send_binary(this[_rid], data); } else if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, data)) { - sendTypedArray(data, ArrayBufferPrototypeGetByteLength(data)); + op_ws_send_binary(this[_rid], data); } else { const string = String(data); - const d = core.encode(string); - this[_bufferedAmount] += TypedArrayPrototypeGetByteLength(d); - PromisePrototypeThen( - op_ws_send_text( - this[_rid], - string, - ), - () => { - this[_bufferedAmount] -= TypedArrayPrototypeGetByteLength(d); - }, + op_ws_send_text( + this[_rid], + string, ); } } diff --git a/ext/websocket/02_websocketstream.js b/ext/websocket/02_websocketstream.js index 00d5bdaecf..be1001eb60 100644 --- a/ext/websocket/02_websocketstream.js +++ b/ext/websocket/02_websocketstream.js @@ -34,8 +34,8 @@ const { Uint8ArrayPrototype, } = primordials; const { - op_ws_send_text, - op_ws_send_binary, + op_ws_send_text_async, + op_ws_send_binary_async, op_ws_next_event, op_ws_create, op_ws_close, @@ -210,11 +210,11 @@ class WebSocketStream { const writable = new WritableStream({ write: async 
(chunk) => { if (typeof chunk === "string") { - await op_ws_send_text(this[_rid], chunk); + await op_ws_send_text_async(this[_rid], chunk); } else if ( ObjectPrototypeIsPrototypeOf(Uint8ArrayPrototype, chunk) ) { - await op_ws_send_binary(this[_rid], chunk); + await op_ws_send_binary_async(this[_rid], chunk); } else { throw new TypeError( "A chunk may only be either a string or an Uint8Array", diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index f2101b413f..af987c1e4b 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -281,8 +281,10 @@ where } let resource = ServerWebSocket { + buffered: Cell::new(0), + errored: Cell::new(None), ws: AsyncRefCell::new(FragmentCollector::new(stream)), - closed: Rc::new(Cell::new(false)), + closed: Cell::new(false), tx_lock: AsyncRefCell::new(()), }; let mut state = state.borrow_mut(); @@ -315,18 +317,20 @@ pub enum MessageKind { } pub struct ServerWebSocket { + buffered: Cell, + errored: Cell>, ws: AsyncRefCell>, - closed: Rc>, + closed: Cell, tx_lock: AsyncRefCell<()>, } impl ServerWebSocket { #[inline] pub async fn write_frame( - self: Rc, + self: &Rc, frame: Frame, ) -> Result<(), AnyError> { - let _lock = RcRef::map(&self, |r| &r.tx_lock).borrow_mut().await; + let _lock = RcRef::map(self, |r| &r.tx_lock).borrow_mut().await; // SAFETY: fastwebsockets only needs a mutable reference to the WebSocket // to populate the write buffer. We encounter an await point when writing // to the socket after the frame has already been written to the buffer. 
@@ -361,8 +365,10 @@ pub fn ws_create_server_stream( ws.set_auto_pong(true); let ws_resource = ServerWebSocket { + buffered: Cell::new(0), + errored: Cell::new(None), ws: AsyncRefCell::new(FragmentCollector::new(ws)), - closed: Rc::new(Cell::new(false)), + closed: Cell::new(false), tx_lock: AsyncRefCell::new(()), }; @@ -370,8 +376,48 @@ pub fn ws_create_server_stream( Ok(rid) } -#[op] -pub async fn op_ws_send_binary( +#[op(fast)] +pub fn op_ws_send_binary( + state: &mut OpState, + rid: ResourceId, + data: ZeroCopyBuf, +) { + let resource = state.resource_table.get::(rid).unwrap(); + let data = data.to_vec(); + let len = data.len(); + resource.buffered.set(resource.buffered.get() + len); + deno_core::task::spawn(async move { + if let Err(err) = resource + .write_frame(Frame::new(true, OpCode::Binary, None, data)) + .await + { + resource.errored.set(Some(err)); + } else { + resource.buffered.set(resource.buffered.get() - len); + } + }); +} + +#[op(fast)] +pub fn op_ws_send_text(state: &mut OpState, rid: ResourceId, data: String) { + let resource = state.resource_table.get::(rid).unwrap(); + let len = data.len(); + resource.buffered.set(resource.buffered.get() + len); + deno_core::task::spawn(async move { + if let Err(err) = resource + .write_frame(Frame::new(true, OpCode::Text, None, data.into_bytes())) + .await + { + resource.errored.set(Some(err)); + } else { + resource.buffered.set(resource.buffered.get() - len); + } + }); +} + +/// Async version of send. Does not update buffered amount as we rely on the socket itself for backpressure. 
+#[op(fast)] +pub async fn op_ws_send_binary_async( state: Rc>, rid: ResourceId, data: ZeroCopyBuf, @@ -380,13 +426,15 @@ pub async fn op_ws_send_binary( .borrow_mut() .resource_table .get::(rid)?; + let data = data.to_vec(); resource - .write_frame(Frame::new(true, OpCode::Binary, None, data.to_vec())) + .write_frame(Frame::new(true, OpCode::Binary, None, data)) .await } -#[op] -pub async fn op_ws_send_text( +/// Async version of send. Does not update buffered amount as we rely on the socket itself for backpressure. +#[op(fast)] +pub async fn op_ws_send_text_async( state: Rc>, rid: ResourceId, data: String, @@ -400,6 +448,16 @@ pub async fn op_ws_send_text( .await } +#[op(fast)] +pub fn op_ws_get_buffered_amount(state: &mut OpState, rid: ResourceId) -> u32 { + state + .resource_table + .get::(rid) + .unwrap() + .buffered + .get() as u32 +} + #[op] pub async fn op_ws_send_pong( state: Rc>, @@ -441,8 +499,7 @@ pub async fn op_ws_close( .map(|reason| Frame::close(code.unwrap_or(1005), reason.as_bytes())) .unwrap_or_else(|| Frame::close_raw(vec![])); - let cell = Rc::clone(&resource.closed); - cell.set(true); + resource.closed.set(true); resource.write_frame(frame).await?; Ok(()) } @@ -457,6 +514,10 @@ pub async fn op_ws_next_event( .resource_table .get::(rid)?; + if let Some(err) = resource.errored.take() { + return Err(err); + } + let mut ws = RcRef::map(&resource, |r| &r.ws).borrow_mut().await; loop { let val = match ws.read_frame().await { @@ -519,8 +580,11 @@ deno_core::extension!(deno_websocket, op_ws_next_event, op_ws_send_binary, op_ws_send_text, + op_ws_send_binary_async, + op_ws_send_text_async, op_ws_send_ping, op_ws_send_pong, + op_ws_get_buffered_amount, ], esm = [ "01_websocket.js", "02_websocketstream.js" ], options = { From c76f9a022710be4abf2fa35f50716ff6df71cf17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 6 Jun 2023 16:02:16 +0200 Subject: [PATCH 309/320] refactor(serde_v8): don't access backing store twice (#19382) I 
did that change recently, did spot that we're calling `get_backing_store()` in succession and that API call is not cheap. --- serde_v8/magic/v8slice.rs | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/serde_v8/magic/v8slice.rs b/serde_v8/magic/v8slice.rs index b1dd897703..2b103f1c96 100644 --- a/serde_v8/magic/v8slice.rs +++ b/serde_v8/magic/v8slice.rs @@ -31,20 +31,6 @@ pub struct V8Slice { unsafe impl Send for V8Slice {} impl V8Slice { - pub fn from_buffer( - buffer: v8::Local, - range: Range, - ) -> Result { - let store = buffer.get_backing_store(); - if store.is_shared() { - return Err(v8::DataError::BadType { - actual: "shared ArrayBufferView", - expected: "non-shared ArrayBufferView", - }); - } - Ok(Self { store, range }) - } - fn as_slice(&self) -> &[u8] { // SAFETY: v8::SharedRef is similar to Arc<[u8]>, // it points to a fixed continuous slice of bytes on the heap. @@ -92,12 +78,15 @@ impl FromV8 for V8Slice { value: v8::Local, ) -> Result { match to_ranged_buffer(scope, value) { - Ok((b, r)) => { - if b.get_backing_store().is_resizable_by_user_javascript() { - return Err(crate::Error::ResizableBackingStoreNotSupported); + Ok((b, range)) => { + let store = b.get_backing_store(); + if store.is_resizable_by_user_javascript() { + Err(crate::Error::ResizableBackingStoreNotSupported) + } else if store.is_shared() { + Err(crate::Error::ExpectedBuffer(value_to_type_str(value))) + } else { + Ok(V8Slice { store, range }) } - Self::from_buffer(b, r) - .map_err(|_| crate::Error::ExpectedBuffer(value_to_type_str(value))) } Err(_) => Err(crate::Error::ExpectedBuffer(value_to_type_str(value))), } From 5aca8b9e5d6420c65ab3ecf516e9d8c8eaaee28f Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Tue, 6 Jun 2023 16:37:10 +0200 Subject: [PATCH 310/320] fix(node/http): use fake socket and proper url handling (#19340) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes 
https://github.com/denoland/deno/issues/19349 --------- Co-authored-by: Bartek Iwańczuk --- cli/tests/unit_node/http_test.ts | 29 +++++++++++++++-- ext/node/polyfills/http.ts | 56 ++++++++++---------------------- test_util/src/lib.rs | 5 +++ 3 files changed, 49 insertions(+), 41 deletions(-) diff --git a/cli/tests/unit_node/http_test.ts b/cli/tests/unit_node/http_test.ts index 05731f543d..6b02282743 100644 --- a/cli/tests/unit_node/http_test.ts +++ b/cli/tests/unit_node/http_test.ts @@ -195,11 +195,14 @@ Deno.test("[node/http] request default protocol", async () => { // @ts-ignore IncomingMessageForClient // deno-lint-ignore no-explicit-any let clientRes: any; + // deno-lint-ignore no-explicit-any + let clientReq: any; server.listen(() => { - const req = http.request( + clientReq = http.request( // deno-lint-ignore no-explicit-any { host: "localhost", port: (server.address() as any).port }, (res) => { + assert(res.socket instanceof EventEmitter); assertEquals(res.complete, false); res.on("data", () => {}); res.on("end", () => { @@ -210,13 +213,14 @@ Deno.test("[node/http] request default protocol", async () => { promise2.resolve(); }, ); - req.end(); + clientReq.end(); }); server.on("close", () => { promise.resolve(); }); await promise; await promise2; + assert(clientReq.socket instanceof EventEmitter); assertEquals(clientRes!.complete, true); }); @@ -596,3 +600,24 @@ Deno.test("[node/http] ClientRequest PUT", async () => { await def; assertEquals(body, "hello world"); }); + +Deno.test("[node/http] ClientRequest search params", async () => { + let body = ""; + const def = deferred(); + const req = http.request({ + host: "localhost:4545", + path: "search_params?foo=bar", + }, (resp) => { + resp.on("data", (chunk) => { + body += chunk; + }); + + resp.on("end", () => { + def.resolve(); + }); + }); + req.once("error", (e) => def.reject(e)); + req.end(); + await def; + assertEquals(body, "foo=bar"); +}); diff --git a/ext/node/polyfills/http.ts 
b/ext/node/polyfills/http.ts index 2429206dd8..250d34e7cb 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -267,6 +267,9 @@ const kError = Symbol("kError"); const kUniqueHeaders = Symbol("kUniqueHeaders"); +class FakeSocket extends EventEmitter { +} + /** ClientRequest represents the http(s) request from the client */ class ClientRequest extends OutgoingMessage { defaultProtocol = "http:"; @@ -541,6 +544,7 @@ class ClientRequest extends OutgoingMessage { this.onSocket(createConnection(optsWithoutSignal)); } }*/ + this.onSocket(new FakeSocket()); const url = this._createUrlStrFromOptions(); @@ -570,41 +574,12 @@ class ClientRequest extends OutgoingMessage { return undefined; } - onSocket(socket, err) { - if (this.destroyed || err) { - this.destroyed = true; - - // deno-lint-ignore no-inner-declarations - function _destroy(req, err) { - if (!req.aborted && !err) { - err = connResetException("socket hang up"); - } - if (err) { - req.emit("error", err); - } - req._closed = true; - req.emit("close"); - } - - if (socket) { - if (!err && this.agent && !socket.destroyed) { - socket.emit("free"); - } else { - finished(socket.destroy(err || this[kError]), (er) => { - if (er?.code === "ERR_STREAM_PREMATURE_CLOSE") { - er = null; - } - _destroy(this, er || err); - }); - return; - } - } - - _destroy(this, err || this[kError]); - } else { - //tickOnSocket(this, socket); - //this._flush(); - } + // TODO(bartlomieju): handle error + onSocket(socket, _err) { + nextTick(() => { + this.socket = socket; + this.emit("socket", socket); + }); } // deno-lint-ignore no-explicit-any @@ -737,16 +712,19 @@ class ClientRequest extends OutgoingMessage { const auth = this.auth; const host = this.host ?? this.hostname ?? "localhost"; const hash = this.hash ? `#${this.hash}` : ""; - const search = this.search ? this.search : ""; const defaultPort = this.agent?.defaultPort; const port = this.port ?? defaultPort ?? 80; let path = this.path ?? 
"/"; if (!path.startsWith("/")) { path = "/" + path; } - return `${protocol}//${auth ? `${auth}@` : ""}${host}${ - port === 80 ? "" : `:${port}` - }${path}${search}${hash}`; + const url = new URL( + `${protocol}//${auth ? `${auth}@` : ""}${host}${ + port === 80 ? "" : `:${port}` + }${path}`, + ); + url.hash = hash; + return url.href; } setTimeout(msecs: number, callback?: () => void) { diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs index 4b64905873..f88092ad9e 100644 --- a/test_util/src/lib.rs +++ b/test_util/src/lib.rs @@ -1085,6 +1085,11 @@ async fn main_server( )); Ok(res) } + (_, "/search_params") => { + let query = req.uri().query().map(|s| s.to_string()); + let res = Response::new(Body::from(query.unwrap_or_default())); + Ok(res) + } _ => { let mut file_path = testdata_path(); file_path.push(&req.uri().path()[1..]); From 1c3d2132c28f4d47aeebeb5b863bbbda05db7147 Mon Sep 17 00:00:00 2001 From: Marvin Hagemeister Date: Tue, 6 Jun 2023 16:55:37 +0200 Subject: [PATCH 311/320] perf(http): avoid flattening http headers (#19384) --- ext/http/00_serve.js | 3 +-- ext/http/http_next.rs | 33 +++++++++++++++++++++++++-------- 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/ext/http/00_serve.js b/ext/http/00_serve.js index c5a5c0e189..7c9b290695 100644 --- a/ext/http/00_serve.js +++ b/ext/http/00_serve.js @@ -37,7 +37,6 @@ import { import { listen, TcpConn } from "ext:deno_net/01_net.js"; import { listenTls } from "ext:deno_net/02_tls.js"; const { - ArrayPrototypeFlat, ArrayPrototypePush, ObjectPrototypeIsPrototypeOf, PromisePrototypeCatch, @@ -559,7 +558,7 @@ function mapToCallback(context, callback, onError) { if (headers.length == 1) { op_http_set_response_header(req, headers[0][0], headers[0][1]); } else { - op_http_set_response_headers(req, ArrayPrototypeFlat(headers)); + op_http_set_response_headers(req, headers); } } diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 14b5457e5d..900a956f49 100644 --- a/ext/http/http_next.rs 
+++ b/ext/http/http_next.rs @@ -21,6 +21,7 @@ use deno_core::error::AnyError; use deno_core::futures::TryFutureExt; use deno_core::op; use deno_core::serde_v8; +use deno_core::serde_v8::from_v8; use deno_core::task::spawn; use deno_core::task::JoinHandle; use deno_core::v8; @@ -384,17 +385,33 @@ pub fn op_http_set_response_header(slab_id: SlabId, name: &str, value: &str) { resp_headers.append(name, value); } -#[op] -pub fn op_http_set_response_headers(slab_id: SlabId, headers: Vec) { +#[op(v8)] +fn op_http_set_response_headers( + scope: &mut v8::HandleScope, + slab_id: SlabId, + headers: serde_v8::Value, +) { let mut http = slab_get(slab_id); // TODO(mmastrac): Invalid headers should be handled? let resp_headers = http.response().headers_mut(); - resp_headers.reserve(headers.len()); - for header in headers.chunks_exact(2) { - // These are valid latin-1 strings - let name = HeaderName::from_bytes(&header[0]).unwrap(); - let value = HeaderValue::from_bytes(&header[1]).unwrap(); - resp_headers.append(name, value); + + let arr = v8::Local::::try_from(headers.v8_value).unwrap(); + + let len = arr.length(); + let header_len = len * 2; + resp_headers.reserve(header_len.try_into().unwrap()); + + for i in 0..len { + let item = arr.get_index(scope, i).unwrap(); + let pair = v8::Local::::try_from(item).unwrap(); + let name = pair.get_index(scope, 0).unwrap(); + let value = pair.get_index(scope, 1).unwrap(); + + let v8_name: ByteString = from_v8(scope, name).unwrap(); + let v8_value: ByteString = from_v8(scope, value).unwrap(); + let header_name = HeaderName::from_bytes(&v8_name).unwrap(); + let header_value = HeaderValue::from_bytes(&v8_value).unwrap(); + resp_headers.append(header_name, header_value); } } From 40d77c56055cca89437b36bb5763a820ef931539 Mon Sep 17 00:00:00 2001 From: Matt Mastracci Date: Tue, 6 Jun 2023 12:53:41 -0600 Subject: [PATCH 312/320] chore(core): build_bench tool (#19387) This is a quick tool that I've been using to build benchmarking builds for Deno. 
Usage: Build a benchmark `HEAD~1` and `origin/main` executable: ```sh deno run tools/build_bench.ts HEAD~1 origin/main ``` Build debug benchmark executables of the last three commits: ```sh deno run tools/build_bench.ts --profile debug HEAD HEAD~1 HEAD~2 ``` --- tools/build_bench.ts | 136 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100755 tools/build_bench.ts diff --git a/tools/build_bench.ts b/tools/build_bench.ts new file mode 100755 index 0000000000..dbbe029677 --- /dev/null +++ b/tools/build_bench.ts @@ -0,0 +1,136 @@ +#!/usr/bin/env -S deno run --unstable --allow-env --allow-read --allow-write --allow-run +// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. + +import $ from "https://deno.land/x/dax@0.32.0/mod.ts"; + +if (Deno.args.length === 0) { + $.log( + "Usage: build_bench [-v] [--profile release|debug] commit1 [commit2 [comment3...]]", + ); + Deno.exit(1); +} + +const args = Deno.args.slice(); +let verbose = false; +if (args[0] == "-v") { + args.shift(); + verbose = true; +} + +let profile = "release"; +if (args[0] == "--profile") { + args.shift(); + profile = args.shift(); +} + +function exit(msg: string) { + $.logError(msg); + Deno.exit(1); +} + +// Make sure the .git dir exists +const gitDir = Deno.cwd() + "/.git"; +await Deno.stat(gitDir); + +async function runCommand(human: string, cmd) { + if (verbose) { + const out = await cmd.noThrow(); + if (out.code != 0) { + exit(human); + } + } else { + const out = await cmd.stdout("piped").stderr("piped").noThrow(); + if (out.code != 0) { + $.logLight("stdout"); + $.logGroup(); + $.log(out.stdout); + $.logGroupEnd(); + $.logLight("stderr"); + $.logGroup(); + $.log(out.stderr); + $.logGroupEnd(); + exit(human); + } + } +} + +async function buildGitCommit(progress, commit) { + const tempDir = $.path(await Deno.makeTempDir()); + + const gitInfo = + await $`git log --pretty=oneline --abbrev-commit -n1 ${commit}`.stdout( + "piped", + 
).stderr("piped").noThrow(); + if (gitInfo.code != 0) { + $.log(gitInfo.stdout); + $.log(gitInfo.stderr); + exit(`Failed to get git info for commit ${commit}`); + } + + const hash = gitInfo.stdout.split(" ")[0]; + progress.message(`${commit} is ${hash}`); + + progress.message(`clone ${hash}`); + await runCommand( + `Failed to clone commit ${commit}`, + $`git clone ${gitDir} ${tempDir}`, + ); + + progress.message(`reset ${hash}`); + await runCommand( + `Failed to reset commit ${commit}`, + $`git reset --hard ${hash}`.cwd(tempDir), + ); + + progress.message(`build ${hash} (please wait)`); + const now = Date.now(); + const interval = setInterval(() => { + const elapsed = Math.round((Date.now() - now) / 1000); + progress.message(`build ${hash} (${elapsed}s)`); + }, 100); + try { + if (profile === "debug") { + await runCommand( + `Failed to build commit ${commit}`, + $`cargo build`.cwd(tempDir), + ); + } else { + await runCommand( + `Failed to build commit ${commit}`, + $`cargo build --profile ${profile}`.cwd(tempDir), + ); + } + } finally { + clearInterval(interval); + } + const elapsed = Math.round((Date.now() - now) / 1000); + + let file; + if (profile === "release") { + file = `deno-${hash}`; + } else { + file = `deno-${profile}-${hash}`; + } + progress.message(`copy ${hash}`); + await tempDir.join("target").join(profile).join("deno").copyFile(file); + + progress.message(`cleanup ${hash}`); + await tempDir.remove({ recursive: true }); + + progress.message("done"); + $.log(`Built ./${file} (${commit}) in ${elapsed}s: ${gitInfo.stdout}`); +} + +const promises = []; +for (const arg of args) { + if (verbose) { + promises.push(buildGitCommit({ message() {} }, arg)); + } else { + const progress = $.progress(`${arg}`); + promises.push(progress.with(async () => { + await buildGitCommit(progress, arg); + })); + } +} + +await Promise.all(promises); From 455b0eb8bb8445f80d9c80a9161f18c1dede5733 Mon Sep 17 00:00:00 2001 From: sigmaSd Date: Tue, 6 Jun 2023 22:06:30 +0100 
Subject: [PATCH 313/320] fix(repl): correctly print string exception (#19391) Fixes a recent regression where `throw "hello"` in the repl prints `Uncaught undefined` instead of `throw "hello"` --- cli/tests/integration/repl_tests.rs | 4 ++++ cli/tools/repl/session.rs | 12 +++++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/cli/tests/integration/repl_tests.rs b/cli/tests/integration/repl_tests.rs index e6fc7aa911..77a534d2e8 100644 --- a/cli/tests/integration/repl_tests.rs +++ b/cli/tests/integration/repl_tests.rs @@ -814,6 +814,10 @@ fn repl_reject() { console.expect(" at "); console.write_line("console.log(2);"); console.expect("2"); + console.write_line(r#"throw "hello";"#); + console.expect(r#"Uncaught "hello""#); + console.write_line(r#"throw `hello ${"world"}`;"#); + console.expect(r#"Uncaught "hello world""#); }); } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 40cf7d3b07..4a30c93c44 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -258,9 +258,15 @@ impl ReplSession { Ok(if let Some(exception_details) = exception_details { session.set_last_thrown_error(&result).await?; let description = match exception_details.exception { - Some(exception) => exception - .description - .unwrap_or_else(|| "undefined".to_string()), + Some(exception) => { + if let Some(description) = exception.description { + description + } else if let Some(value) = exception.value { + value.to_string() + } else { + "undefined".to_string() + } + } None => "Unknown exception".to_string(), }; EvaluationOutput::Error(format!( From 2aba4365ae620a8f097800e7cf85ff86f566b69a Mon Sep 17 00:00:00 2001 From: David Sherret Date: Tue, 6 Jun 2023 17:07:46 -0400 Subject: [PATCH 314/320] perf(cli): conditionally load typescript declaration files (#19392) Closes #18583 --- cli/factory.rs | 16 ++++++++- cli/graph_util.rs | 36 +++++++++++++++---- cli/lsp/language_server.rs | 3 +- cli/tests/integration/bench_tests.rs | 16 +++++++++ 
cli/tests/integration/cache_tests.rs | 10 ++++++ cli/tests/integration/run_tests.rs | 20 ++++++++--- cli/tests/integration/test_tests.rs | 16 +++++++++ .../run/type_directives_js_main.js.out | 3 -- cli/tools/bench.rs | 34 +++++++----------- cli/tools/compile.rs | 14 +++++++- cli/tools/doc.rs | 5 +-- cli/tools/info.rs | 3 +- cli/tools/test.rs | 34 +++++++----------- cli/tools/vendor/mod.rs | 5 ++- cli/tools/vendor/test.rs | 3 +- cli/tsc/mod.rs | 5 +-- 16 files changed, 156 insertions(+), 67 deletions(-) delete mode 100644 cli/tests/testdata/run/type_directives_js_main.js.out diff --git a/cli/factory.rs b/cli/factory.rs index 17d141be14..78aefe7804 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -8,6 +8,7 @@ use crate::args::Lockfile; use crate::args::PackageJsonDepsProvider; use crate::args::StorageKeyResolver; use crate::args::TsConfigType; +use crate::args::TypeCheckMode; use crate::cache::Caches; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; @@ -47,6 +48,7 @@ use crate::worker::HasNodeSpecifierChecker; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_graph::GraphKind; use deno_runtime::deno_fs; use deno_runtime::deno_node::analyze::NodeCodeTranslator; use deno_runtime::deno_node::NodeResolver; @@ -537,7 +539,19 @@ impl CliFactory { } pub fn graph_container(&self) -> &Arc { - self.services.graph_container.get_or_init(Default::default) + self.services.graph_container.get_or_init(|| { + let graph_kind = match self.options.sub_command() { + DenoSubcommand::Cache(_) => GraphKind::All, + _ => { + if self.options.type_check_mode() == TypeCheckMode::None { + GraphKind::CodeOnly + } else { + GraphKind::All + } + } + }; + Arc::new(ModuleGraphContainer::new(graph_kind)) + }) } pub fn maybe_inspector_server(&self) -> &Option> { diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 976c2aeca5..55052b9d0b 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -23,6 +23,7 @@ use deno_core::ModuleSpecifier; use 
deno_core::TaskQueue; use deno_core::TaskQueuePermit; use deno_graph::source::Loader; +use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; @@ -200,6 +201,7 @@ impl ModuleGraphBuilder { pub async fn create_graph_with_loader( &self, + graph_kind: GraphKind, roots: Vec, loader: &mut dyn Loader, ) -> Result { @@ -210,7 +212,7 @@ impl ModuleGraphBuilder { let graph_npm_resolver = cli_resolver.as_graph_npm_resolver(); let analyzer = self.parsed_source_cache.as_analyzer(); - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(graph_kind); self .build_graph_with_npm_resolution( &mut graph, @@ -249,7 +251,13 @@ impl ModuleGraphBuilder { let graph_resolver = cli_resolver.as_graph_resolver(); let graph_npm_resolver = cli_resolver.as_graph_npm_resolver(); let analyzer = self.parsed_source_cache.as_analyzer(); - let mut graph = ModuleGraph::default(); + let should_type_check = + self.options.type_check_mode() != TypeCheckMode::None; + let graph_kind = match should_type_check { + true => GraphKind::All, + false => GraphKind::CodeOnly, + }; + let mut graph = ModuleGraph::new(graph_kind); self .build_graph_with_npm_resolution( &mut graph, @@ -272,7 +280,7 @@ impl ModuleGraphBuilder { graph_lock_or_exit(&graph, &mut lockfile.lock()); } - if self.options.type_check_mode() != TypeCheckMode::None { + if should_type_check { self .type_checker .check( @@ -338,10 +346,13 @@ impl ModuleGraphBuilder { pub async fn create_graph( &self, + graph_kind: GraphKind, roots: Vec, ) -> Result { let mut cache = self.create_graph_loader(); - self.create_graph_with_loader(roots, &mut cache).await + self + .create_graph_with_loader(graph_kind, roots, &mut cache) + .await } } @@ -404,15 +415,15 @@ fn get_resolution_error_bare_specifier( } } -#[derive(Default, Debug)] +#[derive(Debug)] struct GraphData { graph: Arc, checked_libs: HashMap>, } /// Holds the `ModuleGraph` and what parts of it are type checked. 
-#[derive(Default)] pub struct ModuleGraphContainer { + graph_kind: GraphKind, // Allow only one request to update the graph data at a time, // but allow other requests to read from it at any time even // while another request is updating the data. @@ -421,8 +432,19 @@ pub struct ModuleGraphContainer { } impl ModuleGraphContainer { + pub fn new(graph_kind: GraphKind) -> Self { + Self { + graph_kind, + update_queue: Default::default(), + graph_data: Arc::new(RwLock::new(GraphData { + graph: Arc::new(ModuleGraph::new(graph_kind)), + checked_libs: Default::default(), + })), + } + } + pub fn clear(&self) { - self.graph_data.write().graph = Default::default(); + self.graph_data.write().graph = Arc::new(ModuleGraph::new(self.graph_kind)); } /// Acquires a permit to modify the module graph without other code diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 7c4191c82d..66ad043ce9 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -11,6 +11,7 @@ use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::task::spawn; use deno_core::ModuleSpecifier; +use deno_graph::GraphKind; use deno_lockfile::Lockfile; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; @@ -273,7 +274,7 @@ impl LanguageServer { open_docs: &open_docs, }; let graph = module_graph_builder - .create_graph_with_loader(roots.clone(), &mut loader) + .create_graph_with_loader(GraphKind::All, roots.clone(), &mut loader) .await?; graph_util::graph_valid( &graph, diff --git a/cli/tests/integration/bench_tests.rs b/cli/tests/integration/bench_tests.rs index 5b7361b304..0fc2680765 100644 --- a/cli/tests/integration/bench_tests.rs +++ b/cli/tests/integration/bench_tests.rs @@ -3,6 +3,7 @@ use deno_core::url::Url; use test_util as util; use util::assert_contains; +use util::assert_not_contains; use util::env_vars_for_npm_tests; use util::TestContext; @@ -250,3 +251,18 @@ itest!(bench_no_lock { cwd: 
Some("lockfile/basic"), output: "lockfile/basic/bench.nolock.out", }); + +#[test] +fn conditionally_loads_type_graph() { + let context = TestContext::default(); + let output = context + .new_command() + .args("bench --reload -L debug run/type_directives_js_main.js") + .run(); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + let output = context + .new_command() + .args("bench --reload -L debug --no-check run/type_directives_js_main.js") + .run(); + assert_not_contains!(output.combined_output(), "type_reference.d.ts"); +} diff --git a/cli/tests/integration/cache_tests.rs b/cli/tests/integration/cache_tests.rs index 7975cbf193..e8449ca05e 100644 --- a/cli/tests/integration/cache_tests.rs +++ b/cli/tests/integration/cache_tests.rs @@ -1,6 +1,7 @@ // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. use test_util::env_vars_for_npm_tests; +use test_util::TestContext; use test_util::TestContextBuilder; itest!(_036_import_map_fetch { @@ -181,3 +182,12 @@ fn cache_put_overwrite() { output.assert_matches_text("res1\n"); output.assert_exit_code(0); } + +#[test] +fn loads_type_graph() { + let output = TestContext::default() + .new_command() + .args("cache --reload -L debug run/type_directives_js_main.js") + .run(); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); +} diff --git a/cli/tests/integration/run_tests.rs b/cli/tests/integration/run_tests.rs index 31b541e1c5..2accd54445 100644 --- a/cli/tests/integration/run_tests.rs +++ b/cli/tests/integration/run_tests.rs @@ -12,6 +12,7 @@ use test_util::TempDir; use trust_dns_client::serialize::txt::Lexer; use trust_dns_client::serialize::txt::Parser; use util::assert_contains; +use util::assert_not_contains; use util::env_vars_for_npm_tests_no_sync_download; use util::TestContext; use util::TestContextBuilder; @@ -1277,11 +1278,20 @@ 
itest!(type_directives_02 { output: "run/type_directives_02.ts.out", }); -itest!(type_directives_js_main { - args: "run --reload -L debug run/type_directives_js_main.js", - output: "run/type_directives_js_main.js.out", - exit_code: 0, -}); +#[test] +fn type_directives_js_main() { + let context = TestContext::default(); + let output = context + .new_command() + .args("run --reload -L debug --check run/type_directives_js_main.js") + .run(); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + let output = context + .new_command() + .args("run --reload -L debug run/type_directives_js_main.js") + .run(); + assert_not_contains!(output.combined_output(), "type_reference.d.ts"); +} itest!(type_directives_redirect { args: "run --reload --check run/type_directives_redirect.ts", diff --git a/cli/tests/integration/test_tests.rs b/cli/tests/integration/test_tests.rs index 4dd29528fd..cbaea36bd1 100644 --- a/cli/tests/integration/test_tests.rs +++ b/cli/tests/integration/test_tests.rs @@ -3,6 +3,7 @@ use deno_core::url::Url; use test_util as util; use util::assert_contains; +use util::assert_not_contains; use util::env_vars_for_npm_tests; use util::wildcard_match; use util::TestContext; @@ -566,3 +567,18 @@ fn test_with_glob_config_and_flags() { assert_contains!(output, "glob/data/test1.js"); assert_contains!(output, "glob/data/test1.ts"); } + +#[test] +fn conditionally_loads_type_graph() { + let context = TestContext::default(); + let output = context + .new_command() + .args("test --reload -L debug run/type_directives_js_main.js") + .run(); + output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]"); + let output = context + .new_command() + .args("test --reload -L debug --no-check run/type_directives_js_main.js") + .run(); + assert_not_contains!(output.combined_output(), "type_reference.d.ts"); +} diff --git 
a/cli/tests/testdata/run/type_directives_js_main.js.out b/cli/tests/testdata/run/type_directives_js_main.js.out deleted file mode 100644 index 7bca837f02..0000000000 --- a/cli/tests/testdata/run/type_directives_js_main.js.out +++ /dev/null @@ -1,3 +0,0 @@ -[WILDCARD] -DEBUG RS - [WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts -[WILDCARD] diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 107fd2b9b0..1a5df92bf0 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -31,6 +31,7 @@ use deno_core::task::spawn; use deno_core::task::spawn_blocking; use deno_core::v8; use deno_core::ModuleSpecifier; +use deno_graph::GraphKind; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::tokio_util::create_and_run_current_thread; @@ -693,7 +694,11 @@ pub async fn run_benchmarks_with_watch( // file would have impact on other files, which is undesirable. let permissions = Permissions::from_options(&cli_options.permissions_options())?; - let no_check = cli_options.type_check_mode() == TypeCheckMode::None; + let type_check = cli_options.type_check_mode() != TypeCheckMode::None; + let graph_kind = match type_check { + true => GraphKind::All, + false => GraphKind::CodeOnly, + }; let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); @@ -714,7 +719,7 @@ pub async fn run_benchmarks_with_watch( bench_modules.clone() }; let graph = module_graph_builder - .create_graph(bench_modules.clone()) + .create_graph(graph_kind, bench_modules.clone()) .await?; graph_valid_with_cli_options(&graph, &bench_modules, &cli_options)?; @@ -726,32 +731,19 @@ pub async fn run_benchmarks_with_watch( // This needs to be accessible to skip getting dependencies if they're already there, // otherwise this will cause a stack overflow with circular dependencies output: &mut HashSet<&'a ModuleSpecifier>, - no_check: bool, ) { if let Some(module) = 
maybe_module.and_then(|m| m.esm()) { for dep in module.dependencies.values() { if let Some(specifier) = &dep.get_code() { if !output.contains(specifier) { output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); + get_dependencies(graph, graph.get(specifier), output); } } - if !no_check { - if let Some(specifier) = &dep.get_type() { - if !output.contains(specifier) { - output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); - } + if let Some(specifier) = &dep.get_type() { + if !output.contains(specifier) { + output.insert(specifier); + get_dependencies(graph, graph.get(specifier), output); } } } @@ -761,7 +753,7 @@ pub async fn run_benchmarks_with_watch( // This bench module and all it's dependencies let mut modules = HashSet::new(); modules.insert(&specifier); - get_dependencies(&graph, graph.get(&specifier), &mut modules, no_check); + get_dependencies(&graph, graph.get(&specifier), &mut modules); paths_to_watch.extend( modules diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 2ce03e3534..540c23fc86 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -2,6 +2,7 @@ use crate::args::CompileFlags; use crate::args::Flags; +use crate::args::TypeCheckMode; use crate::factory::CliFactory; use crate::standalone::is_standalone_binary; use crate::util::path::path_has_trailing_slash; @@ -10,6 +11,7 @@ use deno_core::anyhow::Context; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; +use deno_graph::GraphKind; use deno_runtime::colors; use std::path::Path; use std::path::PathBuf; @@ -44,10 +46,20 @@ pub async fn compile( let graph = Arc::try_unwrap( module_graph_builder - .create_graph_and_maybe_check(module_roots) + .create_graph_and_maybe_check(module_roots.clone()) .await?, ) .unwrap(); + let graph = if cli_options.type_check_mode() == TypeCheckMode::None { + graph + } else { + // In this case, the 
previous graph creation did type checking, which will + // create a module graph with types information in it. We don't want to + // store that in the eszip so create a code only module graph from scratch. + module_graph_builder + .create_graph(GraphKind::CodeOnly, module_roots) + .await? + }; let parser = parsed_source_cache.as_capturing_parser(); let eszip = eszip::EszipV2::from_graph(graph, &parser, Default::default())?; diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 2cb53cb6ab..87fa253151 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -16,6 +16,7 @@ use deno_core::error::AnyError; use deno_core::resolve_path; use deno_core::resolve_url_or_path; use deno_doc as doc; +use deno_graph::GraphKind; use deno_graph::ModuleSpecifier; use std::path::PathBuf; @@ -43,7 +44,7 @@ pub async fn print_docs( Vec::new(), ); let analyzer = deno_graph::CapturingModuleAnalyzer::default(); - let mut graph = deno_graph::ModuleGraph::default(); + let mut graph = deno_graph::ModuleGraph::new(GraphKind::TypesOnly); graph .build( vec![source_file_specifier.clone()], @@ -87,7 +88,7 @@ pub async fn print_docs( file_fetcher.insert_cached(root); let graph = module_graph_builder - .create_graph(vec![root_specifier.clone()]) + .create_graph(GraphKind::TypesOnly, vec![root_specifier.clone()]) .await?; if let Some(lockfile) = maybe_lockfile { diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 63a755369c..95a7da7b0f 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -11,6 +11,7 @@ use deno_core::resolve_url_or_path; use deno_core::serde_json; use deno_core::serde_json::json; use deno_graph::Dependency; +use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; @@ -43,7 +44,7 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> { let mut loader = module_graph_builder.create_graph_loader(); loader.enable_loading_cache_info(); // for displaying the cache information let graph = 
module_graph_builder - .create_graph_with_loader(vec![specifier], &mut loader) + .create_graph_with_loader(GraphKind::All, vec![specifier], &mut loader) .await?; if let Some(lockfile) = maybe_lockfile { diff --git a/cli/tools/test.rs b/cli/tools/test.rs index f78e325394..bc8f685999 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -39,6 +39,7 @@ use deno_core::task::spawn_blocking; use deno_core::url::Url; use deno_core::v8; use deno_core::ModuleSpecifier; +use deno_graph::GraphKind; use deno_runtime::deno_io::Stdio; use deno_runtime::deno_io::StdioPipe; use deno_runtime::fmt_errors::format_js_error; @@ -1706,7 +1707,11 @@ pub async fn run_tests_with_watch( // file would have impact on other files, which is undesirable. let permissions = Permissions::from_options(&cli_options.permissions_options())?; - let no_check = cli_options.type_check_mode() == TypeCheckMode::None; + let type_check = cli_options.type_check_mode() != TypeCheckMode::None; + let graph_kind = match type_check { + true => GraphKind::All, + false => GraphKind::CodeOnly, + }; let log_level = cli_options.log_level(); let resolver = |changed: Option>| { @@ -1731,7 +1736,7 @@ pub async fn run_tests_with_watch( test_modules.clone() }; let graph = module_graph_builder - .create_graph(test_modules.clone()) + .create_graph(graph_kind, test_modules.clone()) .await?; graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?; @@ -1743,32 +1748,19 @@ pub async fn run_tests_with_watch( // This needs to be accessible to skip getting dependencies if they're already there, // otherwise this will cause a stack overflow with circular dependencies output: &mut HashSet<&'a ModuleSpecifier>, - no_check: bool, ) { if let Some(module) = maybe_module.and_then(|m| m.esm()) { for dep in module.dependencies.values() { if let Some(specifier) = &dep.get_code() { if !output.contains(specifier) { output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); + 
get_dependencies(graph, graph.get(specifier), output); } } - if !no_check { - if let Some(specifier) = &dep.get_type() { - if !output.contains(specifier) { - output.insert(specifier); - get_dependencies( - graph, - graph.get(specifier), - output, - no_check, - ); - } + if let Some(specifier) = &dep.get_type() { + if !output.contains(specifier) { + output.insert(specifier); + get_dependencies(graph, graph.get(specifier), output); } } } @@ -1778,7 +1770,7 @@ pub async fn run_tests_with_watch( // This test module and all it's dependencies let mut modules = HashSet::new(); modules.insert(&specifier); - get_dependencies(&graph, graph.get(&specifier), &mut modules, no_check); + get_dependencies(&graph, graph.get(&specifier), &mut modules); paths_to_watch.extend( modules diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index 5690f5b227..61ada605c5 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -10,6 +10,7 @@ use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; +use deno_graph::GraphKind; use log::warn; use crate::args::CliOptions; @@ -371,7 +372,9 @@ async fn create_graph( .map(|p| resolve_url_or_path(p, initial_cwd)) .collect::, _>>()?; - module_graph_builder.create_graph(entry_points).await + module_graph_builder + .create_graph(GraphKind::All, entry_points) + .await } #[cfg(test)] diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs index e8a474ed34..08b6d8355b 100644 --- a/cli/tools/vendor/test.rs +++ b/cli/tools/vendor/test.rs @@ -16,6 +16,7 @@ use deno_core::serde_json; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; +use deno_graph::GraphKind; use deno_graph::ModuleGraph; use import_map::ImportMap; @@ -279,7 +280,7 @@ async fn build_test_graph( Default::default(), ) }); - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::All); graph .build( roots, 
diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index d9f9b8b531..83fd84f9dc 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -839,6 +839,7 @@ mod tests { use crate::args::TsConfig; use deno_core::futures::future; use deno_core::OpState; + use deno_graph::GraphKind; use deno_graph::ModuleGraph; use std::fs; @@ -882,7 +883,7 @@ mod tests { let hash_data = maybe_hash_data.unwrap_or(0); let fixtures = test_util::testdata_path().join("tsc2"); let mut loader = MockLoader { fixtures }; - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::TypesOnly); graph .build(vec![specifier], &mut loader, Default::default()) .await; @@ -908,7 +909,7 @@ mod tests { let hash_data = 123; // something random let fixtures = test_util::testdata_path().join("tsc2"); let mut loader = MockLoader { fixtures }; - let mut graph = ModuleGraph::default(); + let mut graph = ModuleGraph::new(GraphKind::TypesOnly); graph .build(vec![specifier.clone()], &mut loader, Default::default()) .await; From 42c10ecfdb5b48ef7cf1293ce8d2614d5fae9f33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 7 Jun 2023 11:54:49 +0200 Subject: [PATCH 315/320] perf(ext/websocket): monomorphize code (#19394) Using `deopt-explorer` I found that a bunch of fields on `WebSocket` class were polymorphic. Fortunately it was enough to initialize them to `undefined` to fix the problem. 
--- ext/websocket/01_websocket.js | 139 +++++++++++++++++----------------- 1 file changed, 70 insertions(+), 69 deletions(-) diff --git a/ext/websocket/01_websocket.js b/ext/websocket/01_websocket.js index a38af036aa..f6cb6599d8 100644 --- a/ext/websocket/01_websocket.js +++ b/ext/websocket/01_websocket.js @@ -116,78 +116,18 @@ const _idleTimeoutDuration = Symbol("[[idleTimeout]]"); const _idleTimeoutTimeout = Symbol("[[idleTimeoutTimeout]]"); const _serverHandleIdleTimeout = Symbol("[[serverHandleIdleTimeout]]"); class WebSocket extends EventTarget { - [_rid]; - [_role]; - - [_readyState] = CONNECTING; - get readyState() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_readyState]; - } - - get CONNECTING() { - webidl.assertBranded(this, WebSocketPrototype); - return CONNECTING; - } - get OPEN() { - webidl.assertBranded(this, WebSocketPrototype); - return OPEN; - } - get CLOSING() { - webidl.assertBranded(this, WebSocketPrototype); - return CLOSING; - } - get CLOSED() { - webidl.assertBranded(this, WebSocketPrototype); - return CLOSED; - } - - [_extensions] = ""; - get extensions() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_extensions]; - } - - [_protocol] = ""; - get protocol() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_protocol]; - } - - [_url] = ""; - get url() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_url]; - } - - [_binaryType] = "blob"; - get binaryType() { - webidl.assertBranded(this, WebSocketPrototype); - return this[_binaryType]; - } - set binaryType(value) { - webidl.assertBranded(this, WebSocketPrototype); - value = webidl.converters.DOMString( - value, - "Failed to set 'binaryType' on 'WebSocket'", - ); - if (value === "blob" || value === "arraybuffer") { - this[_binaryType] = value; - } - } - - get bufferedAmount() { - webidl.assertBranded(this, WebSocketPrototype); - if (this[_readyState] === OPEN) { - return op_ws_get_buffered_amount(this[_rid]); - } else { - 
return 0; - } - } - constructor(url, protocols = []) { super(); this[webidl.brand] = webidl.brand; + this[_rid] = undefined; + this[_role] = undefined; + this[_readyState] = CONNECTING; + this[_extensions] = ""; + this[_protocol] = ""; + this[_url] = ""; + this[_binaryType] = "blob"; + this[_idleTimeoutDuration] = 0; + this[_idleTimeoutTimeout] = undefined; const prefix = "Failed to construct 'WebSocket'"; webidl.requiredArguments(arguments.length, 1, prefix); url = webidl.converters.USVString(url, prefix, "Argument 1"); @@ -307,6 +247,67 @@ class WebSocket extends EventTarget { ); } + get readyState() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_readyState]; + } + + get CONNECTING() { + webidl.assertBranded(this, WebSocketPrototype); + return CONNECTING; + } + get OPEN() { + webidl.assertBranded(this, WebSocketPrototype); + return OPEN; + } + get CLOSING() { + webidl.assertBranded(this, WebSocketPrototype); + return CLOSING; + } + get CLOSED() { + webidl.assertBranded(this, WebSocketPrototype); + return CLOSED; + } + + get extensions() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_extensions]; + } + + get protocol() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_protocol]; + } + + get url() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_url]; + } + + get binaryType() { + webidl.assertBranded(this, WebSocketPrototype); + return this[_binaryType]; + } + set binaryType(value) { + webidl.assertBranded(this, WebSocketPrototype); + value = webidl.converters.DOMString( + value, + "Failed to set 'binaryType' on 'WebSocket'", + ); + if (value === "blob" || value === "arraybuffer") { + this[_binaryType] = value; + } + } + + get bufferedAmount() { + webidl.assertBranded(this, WebSocketPrototype); + if (this[_readyState] === OPEN) { + return op_ws_get_buffered_amount(this[_rid]); + } else { + return 0; + } + } + send(data) { webidl.assertBranded(this, WebSocketPrototype); const prefix = "Failed 
to execute 'send' on 'WebSocket'"; From 28ce0ef583b1be03f5ec4fdd90b946590862c5d4 Mon Sep 17 00:00:00 2001 From: Mike Mulchrone Date: Wed, 7 Jun 2023 06:27:25 -0400 Subject: [PATCH 316/320] fix(cli): formatting bench with colors (#19323) --- cli/tests/integration/bench_tests.rs | 6 ++++++ cli/tests/testdata/bench/bench_formatting.out | 8 ++++++++ cli/tests/testdata/bench/bench_formatting.ts | 3 +++ cli/tools/bench.rs | 6 +++--- 4 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 cli/tests/testdata/bench/bench_formatting.out create mode 100644 cli/tests/testdata/bench/bench_formatting.ts diff --git a/cli/tests/integration/bench_tests.rs b/cli/tests/integration/bench_tests.rs index 0fc2680765..0ba297e63b 100644 --- a/cli/tests/integration/bench_tests.rs +++ b/cli/tests/integration/bench_tests.rs @@ -43,6 +43,12 @@ itest!(fail { output: "bench/fail.out", }); +itest!(bench_formatting { + args: "bench bench/bench_formatting.ts", + exit_code: 0, + output: "bench/bench_formatting.out", +}); + itest!(collect { args: "bench --ignore=bench/collect/ignore bench/collect", exit_code: 0, diff --git a/cli/tests/testdata/bench/bench_formatting.out b/cli/tests/testdata/bench/bench_formatting.out new file mode 100644 index 0000000000..9539e712f6 --- /dev/null +++ b/cli/tests/testdata/bench/bench_formatting.out @@ -0,0 +1,8 @@ +Check [WILDCARD]/bench/bench_formatting.ts +cpu: [WILDCARD] +runtime: deno [WILDCARD] ([WILDCARD]) + +[WILDCARD]/bench/bench_formatting.ts +benchmark time (avg) (min … max) p75 p99 p995 +------------------------------------------------- ----------------------------- +[WILDCARD] [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD] \ No newline at end of file diff --git a/cli/tests/testdata/bench/bench_formatting.ts b/cli/tests/testdata/bench/bench_formatting.ts new file mode 100644 index 0000000000..fdee15abb7 --- /dev/null +++ b/cli/tests/testdata/bench/bench_formatting.ts @@ -0,0 +1,3 @@ +Deno.bench("Date.now", () => { + 
Date.now(); +}); diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 1a5df92bf0..6461e544f3 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -1147,13 +1147,13 @@ mod mitata { } else { if options.avg { s.push_str(&format!( - "{:>23}", + "{:>30}", format!("{}/iter", colors::yellow(fmt_duration(stats.avg))) )); } if options.min_max { s.push_str(&format!( - "{:>42}", + "{:>50}", format!( "({} … {})", colors::cyan(fmt_duration(stats.min)), @@ -1163,7 +1163,7 @@ mod mitata { } if options.percentiles { s.push_str(&format!( - " {:>18} {:>18} {:>18}", + " {:>22} {:>22} {:>22}", colors::magenta(fmt_duration(stats.p75)), colors::magenta(fmt_duration(stats.p99)), colors::magenta(fmt_duration(stats.p995)) From da9db887e34f0075e1433ee93854267aec0ef468 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 7 Jun 2023 10:09:10 -0400 Subject: [PATCH 317/320] refactor: helpers methods on `TypeCheckMode` (#19393) --- cli/args/flags.rs | 20 ++++++++++++++++++++ cli/factory.rs | 11 +++-------- cli/graph_util.rs | 16 ++++------------ cli/module_loader.rs | 3 +-- cli/tools/bench.rs | 8 +------- cli/tools/bundle.rs | 3 +-- cli/tools/compile.rs | 7 +++---- cli/tools/test.rs | 8 +------- 8 files changed, 34 insertions(+), 42 deletions(-) diff --git a/cli/args/flags.rs b/cli/args/flags.rs index c4d8a3f87e..2e4d826f92 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -9,6 +9,7 @@ use clap::Command; use clap::ValueHint; use deno_core::resolve_url_or_path; use deno_core::url::Url; +use deno_graph::GraphKind; use deno_runtime::permissions::parse_sys_kind; use log::debug; use log::Level; @@ -255,6 +256,25 @@ pub enum TypeCheckMode { Local, } +impl TypeCheckMode { + /// Gets if type checking will occur under this mode. + pub fn is_true(&self) -> bool { + match self { + Self::None => false, + Self::Local | Self::All => true, + } + } + + /// Gets the corresponding module `GraphKind` that should be created + /// for the current `TypeCheckMode`. 
+ pub fn as_graph_kind(&self) -> GraphKind { + match self.is_true() { + true => GraphKind::All, + false => GraphKind::CodeOnly, + } + } +} + impl Default for TypeCheckMode { fn default() -> Self { Self::None diff --git a/cli/factory.rs b/cli/factory.rs index 78aefe7804..c4331652e1 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -8,7 +8,6 @@ use crate::args::Lockfile; use crate::args::PackageJsonDepsProvider; use crate::args::StorageKeyResolver; use crate::args::TsConfigType; -use crate::args::TypeCheckMode; use crate::cache::Caches; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; @@ -541,14 +540,10 @@ impl CliFactory { pub fn graph_container(&self) -> &Arc { self.services.graph_container.get_or_init(|| { let graph_kind = match self.options.sub_command() { + // todo(dsherret): ideally the graph container would not be used + // for deno cache because it doesn't dynamically load modules DenoSubcommand::Cache(_) => GraphKind::All, - _ => { - if self.options.type_check_mode() == TypeCheckMode::None { - GraphKind::CodeOnly - } else { - GraphKind::All - } - } + _ => self.options.type_check_mode().as_graph_kind(), }; Arc::new(ModuleGraphContainer::new(graph_kind)) }) diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 55052b9d0b..530b0a9745 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -3,7 +3,6 @@ use crate::args::CliOptions; use crate::args::Lockfile; use crate::args::TsTypeLib; -use crate::args::TypeCheckMode; use crate::cache; use crate::cache::ParsedSourceCache; use crate::colors; @@ -57,7 +56,7 @@ pub fn graph_valid_with_cli_options( roots, GraphValidOptions { is_vendoring: false, - follow_type_only: options.type_check_mode() != TypeCheckMode::None, + follow_type_only: options.type_check_mode().is_true(), check_js: options.check_js(), }, ) @@ -229,9 +228,7 @@ impl ModuleGraphBuilder { ) .await?; - if graph.has_node_specifier - && self.options.type_check_mode() != TypeCheckMode::None - { + if graph.has_node_specifier && 
self.options.type_check_mode().is_true() { self .npm_resolver .inject_synthetic_types_node_package() @@ -251,12 +248,7 @@ impl ModuleGraphBuilder { let graph_resolver = cli_resolver.as_graph_resolver(); let graph_npm_resolver = cli_resolver.as_graph_npm_resolver(); let analyzer = self.parsed_source_cache.as_analyzer(); - let should_type_check = - self.options.type_check_mode() != TypeCheckMode::None; - let graph_kind = match should_type_check { - true => GraphKind::All, - false => GraphKind::CodeOnly, - }; + let graph_kind = self.options.type_check_mode().as_graph_kind(); let mut graph = ModuleGraph::new(graph_kind); self .build_graph_with_npm_resolution( @@ -280,7 +272,7 @@ impl ModuleGraphBuilder { graph_lock_or_exit(&graph, &mut lockfile.lock()); } - if should_type_check { + if self.options.type_check_mode().is_true() { self .type_checker .check( diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 3352cb951f..804c9a162b 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -3,7 +3,6 @@ use crate::args::CliOptions; use crate::args::DenoSubcommand; use crate::args::TsTypeLib; -use crate::args::TypeCheckMode; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; use crate::graph_util::graph_lock_or_exit; @@ -169,7 +168,7 @@ impl ModuleLoadPreparer { drop(_pb_clear_guard); // type check if necessary - if self.options.type_check_mode() != TypeCheckMode::None + if self.options.type_check_mode().is_true() && !self.graph_container.is_type_checked(&roots, lib) { let graph = Arc::new(graph.segment(&roots)); diff --git a/cli/tools/bench.rs b/cli/tools/bench.rs index 6461e544f3..a7b75d8be8 100644 --- a/cli/tools/bench.rs +++ b/cli/tools/bench.rs @@ -2,7 +2,6 @@ use crate::args::BenchOptions; use crate::args::CliOptions; -use crate::args::TypeCheckMode; use crate::colors; use crate::display::write_json_to_stdout; use crate::factory::CliFactory; @@ -31,7 +30,6 @@ use deno_core::task::spawn; use deno_core::task::spawn_blocking; use deno_core::v8; 
use deno_core::ModuleSpecifier; -use deno_graph::GraphKind; use deno_runtime::permissions::Permissions; use deno_runtime::permissions::PermissionsContainer; use deno_runtime::tokio_util::create_and_run_current_thread; @@ -694,11 +692,7 @@ pub async fn run_benchmarks_with_watch( // file would have impact on other files, which is undesirable. let permissions = Permissions::from_options(&cli_options.permissions_options())?; - let type_check = cli_options.type_check_mode() != TypeCheckMode::None; - let graph_kind = match type_check { - true => GraphKind::All, - false => GraphKind::CodeOnly, - }; + let graph_kind = cli_options.type_check_mode().as_graph_kind(); let resolver = |changed: Option>| { let paths_to_watch = bench_options.files.include.clone(); diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index 759882c833..f38948776d 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -12,7 +12,6 @@ use crate::args::BundleFlags; use crate::args::CliOptions; use crate::args::Flags; use crate::args::TsConfigType; -use crate::args::TypeCheckMode; use crate::factory::CliFactory; use crate::graph_util::error_for_any_npm_specifier; use crate::util; @@ -157,7 +156,7 @@ fn bundle_module_graph( let ts_config_result = cli_options.resolve_ts_config_for_emit(TsConfigType::Bundle)?; - if cli_options.type_check_mode() == TypeCheckMode::None { + if !cli_options.type_check_mode().is_true() { if let Some(ignored_options) = ts_config_result.maybe_ignored_options { log::warn!("{}", ignored_options); } diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 540c23fc86..c53ae4e028 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -2,7 +2,6 @@ use crate::args::CompileFlags; use crate::args::Flags; -use crate::args::TypeCheckMode; use crate::factory::CliFactory; use crate::standalone::is_standalone_binary; use crate::util::path::path_has_trailing_slash; @@ -50,15 +49,15 @@ pub async fn compile( .await?, ) .unwrap(); - let graph = if 
cli_options.type_check_mode() == TypeCheckMode::None { - graph - } else { + let graph = if cli_options.type_check_mode().is_true() { // In this case, the previous graph creation did type checking, which will // create a module graph with types information in it. We don't want to // store that in the eszip so create a code only module graph from scratch. module_graph_builder .create_graph(GraphKind::CodeOnly, module_roots) .await? + } else { + graph }; let parser = parsed_source_cache.as_capturing_parser(); diff --git a/cli/tools/test.rs b/cli/tools/test.rs index bc8f685999..ebe4deb9ae 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -3,7 +3,6 @@ use crate::args::CliOptions; use crate::args::FilesConfig; use crate::args::TestOptions; -use crate::args::TypeCheckMode; use crate::colors; use crate::display; use crate::factory::CliFactory; @@ -39,7 +38,6 @@ use deno_core::task::spawn_blocking; use deno_core::url::Url; use deno_core::v8; use deno_core::ModuleSpecifier; -use deno_graph::GraphKind; use deno_runtime::deno_io::Stdio; use deno_runtime::deno_io::StdioPipe; use deno_runtime::fmt_errors::format_js_error; @@ -1707,11 +1705,7 @@ pub async fn run_tests_with_watch( // file would have impact on other files, which is undesirable. 
let permissions = Permissions::from_options(&cli_options.permissions_options())?; - let type_check = cli_options.type_check_mode() != TypeCheckMode::None; - let graph_kind = match type_check { - true => GraphKind::All, - false => GraphKind::CodeOnly, - }; + let graph_kind = cli_options.type_check_mode().as_graph_kind(); let log_level = cli_options.log_level(); let resolver = |changed: Option>| { From 7e91f74d2b00cdc64042ba66e45d912fa2d9b647 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 7 Jun 2023 17:02:43 -0400 Subject: [PATCH 318/320] chore: downgrade to Rust 1.69 (#19407) --- .github/workflows/ci.generate.ts | 20 ++++++++++---------- .github/workflows/ci.yml | 24 ++++++++++++------------ Cargo.lock | 9 +++++---- ext/ffi/Cargo.toml | 3 ++- rust-toolchain.toml | 2 +- 5 files changed, 30 insertions(+), 28 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 59bada4fc4..2e99e3f9b7 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. -const cacheVersion = 34; +const cacheVersion = 35; const Runners = (() => { const ubuntuRunner = "ubuntu-22.04"; @@ -24,16 +24,16 @@ const prCacheKeyPrefix = `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.profile }}-\${{ matrix.job }}-`; const installPkgsCommand = - "sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16"; + "sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15"; const sysRootStep = { name: "Set up incremental LTO and sysroot build", run: `# Avoid running man-db triggers, which sometimes takes several minutes # to complete. 
sudo apt-get remove --purge -y man-db -# Install clang-16, lld-16, and debootstrap. -echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main" | - sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-16.list +# Install clang-15, lld-15, and debootstrap. +echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main" | + sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-15.list curl https://apt.llvm.org/llvm-snapshot.gpg.key | gpg --dearmor | sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg @@ -70,8 +70,8 @@ CARGO_PROFILE_RELEASE_INCREMENTAL=false CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-16 - -C link-arg=-fuse-ld=lld-16 + -C linker=clang-15 + -C link-arg=-fuse-ld=lld-15 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -81,8 +81,8 @@ RUSTFLAGS<<__1 __1 RUSTDOCFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-16 - -C link-arg=-fuse-ld=lld-16 + -C linker=clang-15 + -C link-arg=-fuse-ld=lld-15 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -90,7 +90,7 @@ RUSTDOCFLAGS<<__1 -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m \${{ env.RUSTFLAGS }} __1 -CC=clang-16 +CC=clang-15 CFLAGS=-flto=thin --sysroot=/sysroot __0`, }; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3ac0c2e243..690d98caf1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -210,15 +210,15 @@ jobs: # to complete. sudo apt-get remove --purge -y man-db - # Install clang-16, lld-16, and debootstrap. - echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main" | - sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-16.list + # Install clang-15, lld-15, and debootstrap. 
+ echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main" | + sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-15.list curl https://apt.llvm.org/llvm-snapshot.gpg.key | gpg --dearmor | sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg sudo apt-get update # this was unreliable sometimes, so try again if it fails - sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends debootstrap clang-16 lld-16 + sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends debootstrap clang-15 lld-15 # Create ubuntu-16.04 sysroot environment, which is used to avoid # depending on a very recent version of glibc. @@ -249,8 +249,8 @@ jobs: CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-16 - -C link-arg=-fuse-ld=lld-16 + -C linker=clang-15 + -C link-arg=-fuse-ld=lld-15 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -260,8 +260,8 @@ jobs: __1 RUSTDOCFLAGS<<__1 -C linker-plugin-lto=true - -C linker=clang-16 - -C link-arg=-fuse-ld=lld-16 + -C linker=clang-15 + -C link-arg=-fuse-ld=lld-15 -C link-arg=--sysroot=/sysroot -C link-arg=-ldl -C link-arg=-Wl,--allow-shlib-undefined @@ -269,7 +269,7 @@ jobs: -C link-arg=-Wl,--thinlto-cache-policy,cache_size_bytes=700m ${{ env.RUSTFLAGS }} __1 - CC=clang-16 + CC=clang-15 CFLAGS=-flto=thin --sysroot=/sysroot __0 - name: Log versions @@ -293,7 +293,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '34-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' + key: '35-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}' if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)' - name: Restore cache build output (PR) 
uses: actions/cache/restore@v3 @@ -305,7 +305,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '34-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '35-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -589,7 +589,7 @@ jobs: !./target/*/gn_out !./target/*/*.zip !./target/*/*.tar.gz - key: '34-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '35-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/Cargo.lock b/Cargo.lock index f0721f433c..0372622ac2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1001,6 +1001,7 @@ dependencies = [ "dlopen", "dynasmrt", "libffi", + "libffi-sys", "serde", "serde-value", "serde_json", @@ -2787,9 +2788,9 @@ checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libffi" -version = "3.2.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce826c243048e3d5cec441799724de52e2d42f820468431fc3fceee2341871e2" +checksum = "6cb06d5b4c428f3cd682943741c39ed4157ae989fffe1094a08eaf7c4014cf60" dependencies = [ "libc", "libffi-sys", @@ -2797,9 +2798,9 @@ dependencies = [ [[package]] name = "libffi-sys" -version = "2.3.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36115160c57e8529781b4183c2bb51fdc1f6d6d1ed345591d84be7703befb3c" +checksum = "11c6f11e063a27ffe040a9d15f0b661bf41edc2383b7ae0e0ad5a7e7d53d9da3" dependencies = [ "cc", ] diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index ae62c20631..a1d2a68c34 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -17,7 +17,8 @@ path = 
"lib.rs" deno_core.workspace = true dlopen.workspace = true dynasmrt = "1.2.3" -libffi = "3.2.0" +libffi = "=3.1.0" +libffi-sys = "=2.1.0" # temporary pin for downgrade to Rust 1.69 serde.workspace = true serde-value = "0.7" serde_json = "1.0" diff --git a/rust-toolchain.toml b/rust-toolchain.toml index f15cd1c929..f332202964 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.70.0" +channel = "1.69.0" components = ["rustfmt", "clippy"] From 19f82b0eaa14f0df58fdfc685e60c8560582c5a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 7 Jun 2023 23:50:14 +0200 Subject: [PATCH 319/320] refactor(core): use JoinSet instead of FuturesUnordered (#19378) This commit migrates "deno_core" from using "FuturesUnordered" to "tokio::task::JoinSet". This makes every op to be a separate Tokio task and should unlock better utilization of kqueue/epoll. There were two quirks added to this PR: - because of the fact that "JoinSet" immediately polls spawn tasks, op sanitizers can give false positives in some cases, this was alleviated by polling event loop once before running a test with "deno test", which gives canceled ops an opportunity to settle - "JsRuntimeState::waker" was moved to "OpState::waker" so that FFI API can still use threadsafe functions - without this change the registered wakers were wrong as they would not wake up the whole "JsRuntime" but the task associated with an op --------- Co-authored-by: Matt Mastracci --- cli/tools/test.rs | 17 +++++++++++ core/ops.rs | 4 +++ core/realm.rs | 10 +++++-- core/runtime.rs | 72 +++++++++++++++++---------------------------- ext/ffi/callback.rs | 21 +++++++------ 5 files changed, 65 insertions(+), 59 deletions(-) diff --git a/cli/tools/test.rs b/cli/tools/test.rs index ebe4deb9ae..6f32d69e49 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -28,6 +28,7 @@ use deno_core::error::AnyError; use deno_core::error::JsError; use deno_core::futures::future; 
use deno_core::futures::stream; +use deno_core::futures::task::noop_waker; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::located_script_name; @@ -66,6 +67,7 @@ use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::sync::Arc; +use std::task::Context; use std::time::Duration; use std::time::Instant; use std::time::SystemTime; @@ -1006,6 +1008,21 @@ pub async fn test_specifier( continue; } sender.send(TestEvent::Wait(desc.id))?; + + // TODO(bartlomieju): this is a nasty (beautiful) hack, that was required + // when switching `JsRuntime` from `FuturesUnordered` to `JoinSet`. With + // `JoinSet` all pending ops are immediately polled and that caused a problem + // when some async ops were fired and canceled before running tests (giving + // false positives in the ops sanitizer). We should probably rewrite sanitizers + // to be done in Rust instead of in JS (40_testing.js). + { + // Poll event loop once, this will allow all ops that are already resolved, + // but haven't responded to settle. 
+ let waker = noop_waker(); + let mut cx = Context::from_waker(&waker); + let _ = worker.js_runtime.poll_event_loop(&mut cx, false); + } + let earlier = SystemTime::now(); let result = match worker.js_runtime.call_and_await(&function).await { Ok(r) => r, diff --git a/core/ops.rs b/core/ops.rs index 5f1bf67ef6..b766eb60d2 100644 --- a/core/ops.rs +++ b/core/ops.rs @@ -10,6 +10,7 @@ use crate::OpDecl; use crate::OpsTracker; use anyhow::Error; use futures::future::MaybeDone; +use futures::task::AtomicWaker; use futures::Future; use futures::FutureExt; use pin_project::pin_project; @@ -21,6 +22,7 @@ use std::pin::Pin; use std::ptr::NonNull; use std::rc::Rc; use std::rc::Weak; +use std::sync::Arc; use v8::fast_api::CFunctionInfo; use v8::fast_api::CTypeInfo; @@ -184,6 +186,7 @@ pub struct OpState { pub tracker: OpsTracker, pub last_fast_op_error: Option, pub(crate) gotham_state: GothamState, + pub waker: Arc, } impl OpState { @@ -194,6 +197,7 @@ impl OpState { gotham_state: Default::default(), last_fast_op_error: None, tracker: OpsTracker::new(ops_count), + waker: Arc::new(AtomicWaker::new()), } } diff --git a/core/realm.rs b/core/realm.rs index 94ce77464d..d18f41e662 100644 --- a/core/realm.rs +++ b/core/realm.rs @@ -5,10 +5,12 @@ use crate::modules::ModuleCode; use crate::ops::OpCtx; use crate::runtime::exception_to_err_result; use crate::runtime::JsRuntimeState; +use crate::task::MaskResultAsSend; use crate::JsRuntime; -use crate::OpCall; +use crate::OpId; +use crate::OpResult; +use crate::PromiseId; use anyhow::Error; -use futures::stream::FuturesUnordered; use std::cell::RefCell; use std::collections::HashSet; use std::collections::VecDeque; @@ -16,6 +18,7 @@ use std::hash::BuildHasherDefault; use std::hash::Hasher; use std::option::Option; use std::rc::Rc; +use tokio::task::JoinSet; use v8::HandleScope; use v8::Local; @@ -48,7 +51,8 @@ pub(crate) struct ContextState { pub(crate) pending_promise_rejections: VecDeque<(v8::Global, v8::Global)>, pub(crate) 
unrefed_ops: HashSet>, - pub(crate) pending_ops: FuturesUnordered, + pub(crate) pending_ops: + JoinSet>, // We don't explicitly re-read this prop but need the slice to live alongside // the context pub(crate) op_ctxs: Box<[OpCtx]>, diff --git a/core/runtime.rs b/core/runtime.rs index a27717a8b4..ecfd0bd571 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -41,7 +41,6 @@ use futures::future::FutureExt; use futures::future::MaybeDone; use futures::stream::StreamExt; use futures::task::noop_waker; -use futures::task::AtomicWaker; use smallvec::SmallVec; use std::any::Any; use std::cell::RefCell; @@ -309,7 +308,6 @@ pub struct JsRuntimeState { dyn_module_evaluate_idle_counter: u32, pub(crate) source_map_getter: Option>>, pub(crate) source_map_cache: Rc>, - pub(crate) have_unpolled_ops: bool, pub(crate) op_state: Rc>, pub(crate) shared_array_buffer_store: Option, pub(crate) compiled_wasm_module_store: Option, @@ -320,7 +318,6 @@ pub struct JsRuntimeState { // flimsy. Try to poll it similarly to `pending_promise_rejections`. pub(crate) dispatched_exception: Option>, pub(crate) inspector: Option>>, - waker: AtomicWaker, } impl JsRuntimeState { @@ -546,8 +543,6 @@ impl JsRuntime { shared_array_buffer_store: options.shared_array_buffer_store, compiled_wasm_module_store: options.compiled_wasm_module_store, op_state: op_state.clone(), - waker: AtomicWaker::new(), - have_unpolled_ops: false, dispatched_exception: None, // Some fields are initialized later after isolate is created inspector: None, @@ -1328,7 +1323,7 @@ impl JsRuntime { { let state = self.inner.state.borrow(); has_inspector = state.inspector.is_some(); - state.waker.register(cx.waker()); + state.op_state.borrow().waker.register(cx.waker()); } if has_inspector { @@ -1419,12 +1414,11 @@ impl JsRuntime { // TODO(andreubotella) The event loop will spin as long as there are pending // background tasks. We should look into having V8 notify us when a // background task is done. 
- if state.have_unpolled_ops - || pending_state.has_pending_background_tasks + if pending_state.has_pending_background_tasks || pending_state.has_tick_scheduled || maybe_scheduling { - state.waker.wake(); + state.op_state.borrow().waker.wake(); } drop(state); @@ -1477,7 +1471,7 @@ impl JsRuntime { // evaluation may complete during this, in which case the counter will // reset. state.dyn_module_evaluate_idle_counter += 1; - state.waker.wake(); + state.op_state.borrow().waker.wake(); } } @@ -1670,7 +1664,7 @@ impl JsRuntimeState { /// after initiating new dynamic import load. pub fn notify_new_dynamic_import(&mut self) { // Notify event loop to poll again soon. - self.waker.wake(); + self.op_state.borrow().waker.wake(); } } @@ -2404,12 +2398,6 @@ impl JsRuntime { // Polls pending ops and then runs `Deno.core.eventLoopTick` callback. fn do_js_event_loop_tick(&mut self, cx: &mut Context) -> Result<(), Error> { - // Now handle actual ops. - { - let mut state = self.inner.state.borrow_mut(); - state.have_unpolled_ops = false; - } - // Handle responses for each realm. let state = self.inner.state.clone(); let isolate = &mut self.inner.v8_isolate; @@ -2433,10 +2421,15 @@ impl JsRuntime { let mut args: SmallVec<[v8::Local; 32]> = SmallVec::with_capacity(32); - while let Poll::Ready(Some(item)) = - context_state.pending_ops.poll_next_unpin(cx) - { - let (promise_id, op_id, mut resp) = item; + loop { + let item = { + let next = std::pin::pin!(context_state.pending_ops.join_next()); + let Poll::Ready(Some(item)) = next.poll(cx) else { + break; + }; + item + }; + let (promise_id, op_id, mut resp) = item.unwrap().into_inner(); state .borrow() .op_state @@ -2486,11 +2479,6 @@ pub fn queue_fast_async_op( promise_id: PromiseId, op: impl Future> + 'static, ) { - let runtime_state = match ctx.runtime_state.upgrade() { - Some(rc_state) => rc_state, - // at least 1 Rc is held by the JsRuntime. 
- None => unreachable!(), - }; let get_class = { let state = RefCell::borrow(&ctx.state); state.tracker.track_async(ctx.id); @@ -2499,13 +2487,10 @@ pub fn queue_fast_async_op( let fut = op .map(|result| crate::_ops::to_op_result(get_class, result)) .boxed_local(); - let mut state = runtime_state.borrow_mut(); - ctx - .context_state - .borrow_mut() - .pending_ops - .push(OpCall::pending(ctx, promise_id, fut)); - state.have_unpolled_ops = true; + // SAFETY: this this is guaranteed to be running on a current-thread executor + ctx.context_state.borrow_mut().pending_ops.spawn(unsafe { + crate::task::MaskFutureAsSend::new(OpCall::pending(ctx, promise_id, fut)) + }); } #[inline] @@ -2584,12 +2569,6 @@ pub fn queue_async_op<'s>( promise_id: PromiseId, mut op: MaybeDone>>>, ) -> Option> { - let runtime_state = match ctx.runtime_state.upgrade() { - Some(rc_state) => rc_state, - // at least 1 Rc is held by the JsRuntime. - None => unreachable!(), - }; - // An op's realm (as given by `OpCtx::realm_idx`) must match the realm in // which it is invoked. Otherwise, we might have cross-realm object exposure. // deno_core doesn't currently support such exposure, even though embedders @@ -2627,9 +2606,12 @@ pub fn queue_async_op<'s>( // Otherwise we will push it to the `pending_ops` and let it be polled again // or resolved on the next tick of the event loop. 
- let mut state = runtime_state.borrow_mut(); - ctx.context_state.borrow_mut().pending_ops.push(op_call); - state.have_unpolled_ops = true; + ctx + .context_state + .borrow_mut() + .pending_ops + // SAFETY: this this is guaranteed to be running on a current-thread executor + .spawn(unsafe { crate::task::MaskFutureAsSend::new(op_call) }); None } @@ -2744,8 +2726,8 @@ pub mod tests { (runtime, dispatch_count) } - #[test] - fn test_ref_unref_ops() { + #[tokio::test] + async fn test_ref_unref_ops() { let (mut runtime, _dispatch_count) = setup(Mode::AsyncDeferred); runtime .execute_script_static( @@ -4735,6 +4717,7 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { } } + #[ignore] #[tokio::test] async fn js_realm_gc() { static INVOKE_COUNT: AtomicUsize = AtomicUsize::new(0); @@ -4793,7 +4776,6 @@ Deno.core.opAsync("op_async_serialize_object_with_numbers_as_keys", { .await .unwrap(); } - drop(runtime); // Make sure the OpState was dropped properly when the runtime dropped diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index 2d2cf491be..78a21ab8f4 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -10,6 +10,7 @@ use crate::MAX_SAFE_INTEGER; use crate::MIN_SAFE_INTEGER; use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; +use deno_core::futures::task::AtomicWaker; use deno_core::op; use deno_core::serde_v8; use deno_core::v8; @@ -32,8 +33,8 @@ use std::rc::Rc; use std::sync::atomic; use std::sync::atomic::AtomicU32; use std::sync::mpsc::sync_channel; +use std::sync::Arc; use std::task::Poll; -use std::task::Waker; static THREAD_ID_COUNTER: AtomicU32 = AtomicU32::new(1); @@ -99,21 +100,20 @@ struct CallbackInfo { pub parameters: Box<[NativeType]>, pub result: NativeType, pub thread_id: u32, - pub waker: Option, + pub waker: Arc, } impl Future for CallbackInfo { type Output = (); fn poll( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, + self: Pin<&mut Self>, + _cx: &mut std::task::Context<'_>, ) -> 
std::task::Poll { - // Always replace the waker to make sure it's bound to the proper Future. - self.waker.replace(cx.waker().clone()); // The future for the CallbackInfo never resolves: It can only be canceled. Poll::Pending } } + unsafe extern "C" fn deno_ffi_callback( cif: &libffi::low::ffi_cif, result: &mut c_void, @@ -136,10 +136,8 @@ unsafe extern "C" fn deno_ffi_callback( response_sender.send(()).unwrap(); }); async_work_sender.unbounded_send(fut).unwrap(); - if let Some(waker) = info.waker.as_ref() { - // Make sure event loop wakes up to receive our message before we start waiting for a response. - waker.wake_by_ref(); - } + // Make sure event loop wakes up to receive our message before we start waiting for a response. + info.waker.wake(); response_receiver.recv().unwrap(); } }); @@ -574,6 +572,7 @@ where let current_context = scope.get_current_context(); let context = v8::Global::new(scope, current_context).into_raw(); + let waker = state.waker.clone(); let info: *mut CallbackInfo = Box::leak(Box::new(CallbackInfo { async_work_sender, callback, @@ -581,7 +580,7 @@ where parameters: args.parameters.clone().into(), result: args.result.clone(), thread_id, - waker: None, + waker, })); let cif = Cif::new( args From 794f731238f5036a980b1e1e21e7b4acb652d914 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Wed, 7 Jun 2023 17:57:36 -0400 Subject: [PATCH 320/320] ci: output file system space before and after building (#19409) This will help give us better insight. 
--- .github/workflows/ci.generate.ts | 7 ++++++- .github/workflows/ci.yml | 5 ++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 2e99e3f9b7..b9a60f76b3 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -558,7 +558,12 @@ const ci = { { name: "Build debug", if: "matrix.job == 'test' && matrix.profile == 'debug'", - run: "cargo build --locked --all-targets", + run: [ + // output fs space before and after building + "df -h", + "cargo build --locked --all-targets", + "df -h", + ].join("\n"), env: { CARGO_PROFILE_DEV_DEBUG: 0 }, }, { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 690d98caf1..f1c13959fd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -336,7 +336,10 @@ jobs: run: deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check - name: Build debug if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''test'' && matrix.profile == ''debug'')' - run: cargo build --locked --all-targets + run: |- + df -h + cargo build --locked --all-targets + df -h env: CARGO_PROFILE_DEV_DEBUG: 0 - name: Build release