From 3df8f1650039e9453056d516744e755d6be8801b Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sat, 12 Oct 2024 08:20:17 -0700 Subject: [PATCH 01/52] refactor(ext/broadcastchannel): use concrete error type (#26105) --- Cargo.lock | 9 +-- ext/broadcast_channel/Cargo.toml | 1 + .../in_memory_broadcast_channel.rs | 13 ++-- ext/broadcast_channel/lib.rs | 70 +++++++++++++++---- runtime/errors.rs | 18 +++++ 5 files changed, 90 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0e6b4fb6ac..a166c6f232 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1340,6 +1340,7 @@ version = "0.165.0" dependencies = [ "async-trait", "deno_core", + "thiserror", "tokio", "uuid", ] @@ -7153,18 +7154,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 3caf7b9efd..b19c4ce151 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -16,5 +16,6 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +thiserror.workspace = true tokio.workspace = true uuid.workspace = true diff --git a/ext/broadcast_channel/in_memory_broadcast_channel.rs b/ext/broadcast_channel/in_memory_broadcast_channel.rs index 00b52a9d60..61dc68e17d 100644 --- a/ext/broadcast_channel/in_memory_broadcast_channel.rs +++ b/ext/broadcast_channel/in_memory_broadcast_channel.rs @@ -3,13 +3,13 @@ use std::sync::Arc; use async_trait::async_trait; -use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use tokio::sync::broadcast; use tokio::sync::mpsc; use uuid::Uuid; use crate::BroadcastChannel; +use crate::BroadcastChannelError; #[derive(Clone)] pub struct InMemoryBroadcastChannel(Arc>>); @@ -41,7 +41,7 @@ impl Default for InMemoryBroadcastChannel { impl BroadcastChannel for InMemoryBroadcastChannel { type Resource = InMemoryBroadcastChannelResource; - fn subscribe(&self) -> Result { + fn subscribe(&self) -> Result { let (cancel_tx, cancel_rx) = mpsc::unbounded_channel(); let broadcast_rx = self.0.lock().subscribe(); let rx = tokio::sync::Mutex::new((broadcast_rx, cancel_rx)); @@ -53,7 +53,10 @@ impl BroadcastChannel for InMemoryBroadcastChannel { }) } - fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError> { + fn unsubscribe( + &self, + resource: &Self::Resource, + ) -> Result<(), BroadcastChannelError> { Ok(resource.cancel_tx.send(())?) 
} @@ -62,7 +65,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel { resource: &Self::Resource, name: String, data: Vec, - ) -> Result<(), AnyError> { + ) -> Result<(), BroadcastChannelError> { let name = Arc::new(name); let data = Arc::new(data); let uuid = resource.uuid; @@ -73,7 +76,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel { async fn recv( &self, resource: &Self::Resource, - ) -> Result, AnyError> { + ) -> Result, BroadcastChannelError> { let mut g = resource.rx.lock().await; let (broadcast_rx, cancel_rx) = &mut *g; loop { diff --git a/ext/broadcast_channel/lib.rs b/ext/broadcast_channel/lib.rs index 47c48656d8..c1de118a36 100644 --- a/ext/broadcast_channel/lib.rs +++ b/ext/broadcast_channel/lib.rs @@ -10,34 +10,69 @@ use std::path::PathBuf; use std::rc::Rc; use async_trait::async_trait; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::JsBuffer; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; +use tokio::sync::broadcast::error::SendError as BroadcastSendError; +use tokio::sync::mpsc::error::SendError as MpscSendError; pub const UNSTABLE_FEATURE_NAME: &str = "broadcast-channel"; +#[derive(Debug, thiserror::Error)] +pub enum BroadcastChannelError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + MPSCSendError(MpscSendError>), + #[error(transparent)] + BroadcastSendError( + BroadcastSendError>, + ), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + +impl From> + for BroadcastChannelError +{ + fn from(value: MpscSendError) -> Self { + BroadcastChannelError::MPSCSendError(MpscSendError(Box::new(value.0))) + } +} +impl From> + for BroadcastChannelError +{ + fn from(value: BroadcastSendError) -> Self { + BroadcastChannelError::BroadcastSendError(BroadcastSendError(Box::new( + value.0, + ))) + } +} + #[async_trait] pub trait BroadcastChannel: Clone { type Resource: Resource; - fn subscribe(&self) -> Result; + fn subscribe(&self) -> Result; - fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError>; + fn unsubscribe( + &self, + resource: &Self::Resource, + ) -> Result<(), BroadcastChannelError>; async fn send( &self, resource: &Self::Resource, name: String, data: Vec, - ) -> Result<(), AnyError>; + ) -> Result<(), BroadcastChannelError>; async fn recv( &self, resource: &Self::Resource, - ) -> Result, AnyError>; + ) -> Result, BroadcastChannelError>; } pub type Message = (String, Vec); @@ -46,7 +81,7 @@ pub type Message = (String, Vec); #[smi] pub fn op_broadcast_subscribe( state: &mut OpState, -) -> Result +) -> Result where BC: BroadcastChannel + 'static, { @@ -62,11 +97,14 @@ where pub fn op_broadcast_unsubscribe( state: &mut OpState, #[smi] rid: ResourceId, -) -> Result<(), AnyError> +) -> Result<(), BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.resource_table.get::(rid)?; + let resource = state + .resource_table + .get::(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow::(); bc.unsubscribe(&resource) } @@ -77,11 +115,15 @@ pub async fn op_broadcast_send( #[smi] rid: ResourceId, #[string] name: String, #[buffer] buf: JsBuffer, -) -> Result<(), AnyError> +) -> Result<(), BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.borrow().resource_table.get::(rid)?; + let resource = state + .borrow() + .resource_table + .get::(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow().borrow::().clone(); bc.send(&resource, name, buf.to_vec()).await } @@ 
-91,11 +133,15 @@ where pub async fn op_broadcast_recv( state: Rc>, #[smi] rid: ResourceId, -) -> Result, AnyError> +) -> Result, BroadcastChannelError> where BC: BroadcastChannel + 'static, { - let resource = state.borrow().resource_table.get::(rid)?; + let resource = state + .borrow() + .resource_table + .get::(rid) + .map_err(BroadcastChannelError::Resource)?; let bc = state.borrow().borrow::().clone(); bc.recv(&resource).await } diff --git a/runtime/errors.rs b/runtime/errors.rs index 694402773e..476aae63b9 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -9,6 +9,7 @@ //! Diagnostics are compile-time type errors, whereas JsErrors are runtime //! exceptions. +use deno_broadcast_channel::BroadcastChannelError; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url; @@ -153,12 +154,29 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str { + match error { + BroadcastChannelError::Resource(err) => { + deno_core::error::get_custom_error_class(err).unwrap() + } + BroadcastChannelError::MPSCSendError(_) => "Error", + BroadcastChannelError::BroadcastSendError(_) => "Error", + BroadcastChannelError::Other(err) => { + get_error_class_name(err).unwrap_or("Error") + } + } +} + pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { deno_core::error::get_custom_error_class(e) .or_else(|| deno_webgpu::error::get_error_class_name(e)) .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| { + e.downcast_ref::() + .map(get_broadcast_channel_error) + }) .or_else(|| { e.downcast_ref::() .map(get_dlopen_error_class) From 938a8ebe347639c07042768e97969b75cc600e16 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sat, 12 Oct 2024 09:15:10 -0700 Subject: [PATCH 02/52] refactor(ext/cache): use concrete error type (#26109) --- Cargo.lock | 1 + ext/cache/Cargo.toml | 1 + ext/cache/lib.rs | 60 +++++++++++++++++++++++++++++++------------- ext/cache/sqlite.rs | 50 ++++++++++++++++++++---------------- runtime/errors.rs | 14 +++++++++++ 5 files changed, 88 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a166c6f232..84a57f0362 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1354,6 +1354,7 @@ dependencies = [ "rusqlite", "serde", "sha2", + "thiserror", "tokio", ] diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index 71ca34380c..9d876fcb71 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -19,4 +19,5 @@ deno_core.workspace = true rusqlite.workspace = true serde.workspace = true sha2.workspace = true +thiserror.workspace = true tokio.workspace = true diff --git a/ext/cache/lib.rs b/ext/cache/lib.rs index f6d758b95c..08661c3493 100644 --- a/ext/cache/lib.rs +++ b/ext/cache/lib.rs @@ -7,7 +7,6 @@ use std::sync::Arc; use async_trait::async_trait; use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -19,6 +18,20 @@ use deno_core::ResourceId; mod sqlite; pub use sqlite::SqliteBackedCache; +#[derive(Debug, thiserror::Error)] +pub enum CacheError { + #[error(transparent)] + Sqlite(#[from] rusqlite::Error), + #[error(transparent)] + JoinError(#[from] tokio::task::JoinError), + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Other(deno_core::error::AnyError), + 
#[error(transparent)] + Io(#[from] std::io::Error), +} + #[derive(Clone)] pub struct CreateCache(pub Arc C>); @@ -92,26 +105,31 @@ pub struct CacheDeleteRequest { pub trait Cache: Clone + 'static { type CacheMatchResourceType: Resource; - async fn storage_open(&self, cache_name: String) -> Result; - async fn storage_has(&self, cache_name: String) -> Result; - async fn storage_delete(&self, cache_name: String) -> Result; + async fn storage_open(&self, cache_name: String) -> Result; + async fn storage_has(&self, cache_name: String) -> Result; + async fn storage_delete( + &self, + cache_name: String, + ) -> Result; /// Put a resource into the cache. async fn put( &self, request_response: CachePutRequest, resource: Option>, - ) -> Result<(), AnyError>; + ) -> Result<(), CacheError>; async fn r#match( &self, request: CacheMatchRequest, ) -> Result< Option<(CacheMatchResponseMeta, Option)>, - AnyError, + CacheError, >; - async fn delete(&self, request: CacheDeleteRequest) - -> Result; + async fn delete( + &self, + request: CacheDeleteRequest, + ) -> Result; } #[op2(async)] @@ -119,7 +137,7 @@ pub trait Cache: Clone + 'static { pub async fn op_cache_storage_open( state: Rc>, #[string] cache_name: String, -) -> Result +) -> Result where CA: Cache, { @@ -131,7 +149,7 @@ where pub async fn op_cache_storage_has( state: Rc>, #[string] cache_name: String, -) -> Result +) -> Result where CA: Cache, { @@ -143,7 +161,7 @@ where pub async fn op_cache_storage_delete( state: Rc>, #[string] cache_name: String, -) -> Result +) -> Result where CA: Cache, { @@ -155,13 +173,19 @@ where pub async fn op_cache_put( state: Rc>, #[serde] request_response: CachePutRequest, -) -> Result<(), AnyError> +) -> Result<(), CacheError> where CA: Cache, { let cache = get_cache::(&state)?; let resource = match request_response.response_rid { - Some(rid) => Some(state.borrow_mut().resource_table.take_any(rid)?), + Some(rid) => Some( + state + .borrow_mut() + .resource_table + .take_any(rid) + .map_err(CacheError::Resource)?, + ), None => None, }; cache.put(request_response, resource).await @@ -172,7 +196,7 @@ where pub async fn op_cache_match( state: Rc>, #[serde] request: CacheMatchRequest, -) -> Result, AnyError> +) -> Result, CacheError> where CA: Cache, { @@ -191,7 +215,7 @@ where pub async fn op_cache_delete( state: Rc>, #[serde] request: CacheDeleteRequest, -) -> Result +) -> Result where CA: Cache, { @@ -199,7 +223,7 @@ where cache.delete(request).await } -pub fn get_cache(state: &Rc>) -> Result +pub fn get_cache(state: &Rc>) -> Result where CA: Cache, { @@ -211,7 +235,9 @@ where state.put(cache); Ok(state.borrow::().clone()) } else { - Err(type_error("CacheStorage is not available in this context")) + Err(CacheError::Other(type_error( + "CacheStorage is not available in this context", + ))) } } diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index c3c55dd5e9..e4991c32f1 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -30,6 +30,7 @@ use crate::serialize_headers; use crate::vary_header_matches; use crate::Cache; use crate::CacheDeleteRequest; +use crate::CacheError; use crate::CacheMatchRequest; use crate::CacheMatchResponseMeta; use crate::CachePutRequest; @@ -102,7 +103,7 @@ impl Cache for SqliteBackedCache { /// Open a cache storage. Internally, this creates a row in the /// sqlite db if the cache doesn't exist and returns the internal id /// of the cache. 
- async fn storage_open(&self, cache_name: String) -> Result { + async fn storage_open(&self, cache_name: String) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); spawn_blocking(move || { @@ -121,14 +122,14 @@ impl Cache for SqliteBackedCache { )?; let responses_dir = get_responses_dir(cache_storage_dir, cache_id); std::fs::create_dir_all(responses_dir)?; - Ok::(cache_id) + Ok::(cache_id) }) .await? } /// Check if a cache with the provided name exists. /// Note: this doesn't check the disk, it only checks the sqlite db. - async fn storage_has(&self, cache_name: String) -> Result { + async fn storage_has(&self, cache_name: String) -> Result { let db = self.connection.clone(); spawn_blocking(move || { let db = db.lock(); @@ -140,13 +141,16 @@ impl Cache for SqliteBackedCache { Ok(count > 0) }, )?; - Ok::(cache_exists) + Ok::(cache_exists) }) .await? } /// Delete a cache storage. Internally, this deletes the row in the sqlite db. - async fn storage_delete(&self, cache_name: String) -> Result { + async fn storage_delete( + &self, + cache_name: String, + ) -> Result { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); spawn_blocking(move || { @@ -167,7 +171,7 @@ impl Cache for SqliteBackedCache { std::fs::remove_dir_all(cache_dir)?; } } - Ok::(maybe_cache_id.is_some()) + Ok::(maybe_cache_id.is_some()) }) .await? } @@ -176,10 +180,12 @@ impl Cache for SqliteBackedCache { &self, request_response: CachePutRequest, resource: Option>, - ) -> Result<(), AnyError> { + ) -> Result<(), CacheError> { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); - let now = SystemTime::now().duration_since(UNIX_EPOCH)?; + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("SystemTime is before unix epoch"); if let Some(resource) = resource { let body_key = hash(&format!( @@ -193,7 +199,11 @@ impl Cache for SqliteBackedCache { let mut file = tokio::fs::File::create(response_path).await?; let mut buf = BufMutView::new(64 * 1024); loop { - let (size, buf2) = resource.clone().read_byob(buf).await?; + let (size, buf2) = resource + .clone() + .read_byob(buf) + .await + .map_err(CacheError::Other)?; if size == 0 { break; } @@ -224,7 +234,7 @@ impl Cache for SqliteBackedCache { request: CacheMatchRequest, ) -> Result< Option<(CacheMatchResponseMeta, Option)>, - AnyError, + CacheError, > { let db = self.connection.clone(); let cache_storage_dir = self.cache_storage_dir.clone(); @@ -290,19 +300,17 @@ impl Cache for SqliteBackedCache { } Err(err) => return Err(err.into()), }; - return Ok(Some((cache_meta, Some(CacheResponseResource::new(file))))); + Ok(Some((cache_meta, Some(CacheResponseResource::new(file))))) } - Some((cache_meta, None)) => { - return Ok(Some((cache_meta, None))); - } - None => return Ok(None), + Some((cache_meta, None)) => Ok(Some((cache_meta, None))), + None => Ok(None), } } async fn delete( &self, request: CacheDeleteRequest, - ) -> Result { + ) -> Result { let db = self.connection.clone(); spawn_blocking(move || { // TODO(@satyarohith): remove the response body from disk if one exists @@ -311,17 +319,17 @@ impl Cache for SqliteBackedCache { "DELETE FROM request_response_list WHERE cache_id = ?1 AND request_url = ?2", (request.cache_id, &request.request_url), )?; - Ok::(rows_effected > 0) + Ok::(rows_effected > 0) }) .await? 
} } async fn insert_cache_asset( - db: Arc>, + db: Arc>, put: CachePutRequest, response_body_key: Option, -) -> Result, deno_core::anyhow::Error> { +) -> Result, CacheError> { spawn_blocking(move || { let maybe_response_body = { let db = db.lock(); @@ -339,7 +347,7 @@ async fn insert_cache_asset( response_body_key, put.response_status, put.response_status_text, - SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(), + SystemTime::now().duration_since(UNIX_EPOCH).expect("SystemTime is before unix epoch").as_secs(), ), |row| { let response_body_key: Option = row.get(0)?; @@ -347,7 +355,7 @@ async fn insert_cache_asset( }, )? }; - Ok::, AnyError>(maybe_response_body) + Ok::, CacheError>(maybe_response_body) }).await? } diff --git a/runtime/errors.rs b/runtime/errors.rs index 476aae63b9..59928965b0 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -10,6 +10,7 @@ //! exceptions. use deno_broadcast_channel::BroadcastChannelError; +use deno_cache::CacheError; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url; @@ -154,6 +155,18 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +pub fn get_cache_error(error: &CacheError) -> &'static str { + match error { + CacheError::Sqlite(_) => "Error", + CacheError::JoinError(_) => "Error", + CacheError::Resource(err) => { + deno_core::error::get_custom_error_class(err).unwrap_or("Error") + } + CacheError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + CacheError::Io(err) => get_io_error_class(err), + } +} + fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str { match error { BroadcastChannelError::Resource(err) => { @@ -173,6 +186,7 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::().map(get_cache_error)) .or_else(|| { e.downcast_ref::() .map(get_broadcast_channel_error) From 8b2c6fc2d22f0a62ceefa71ff739f81796142699 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sat, 12 Oct 2024 10:00:35 -0700 Subject: [PATCH 03/52] refactor(ext/canvas): use concrete error type (#26111) --- Cargo.lock | 1 + ext/canvas/Cargo.toml | 1 + ext/canvas/lib.rs | 23 ++++++++++++++--------- runtime/errors.rs | 10 ++++++++++ 4 files changed, 26 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 84a57f0362..2500b5a2cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1386,6 +1386,7 @@ dependencies = [ "deno_webgpu", "image", "serde", + "thiserror", ] [[package]] diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 47c37560ec..78c674348b 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -18,3 +18,4 @@ deno_core.workspace = true deno_webgpu.workspace = true image = { version = "0.24.7", default-features = false, features = ["png"] } serde = { workspace = true, features = ["derive"] } +thiserror.workspace = true diff --git a/ext/canvas/lib.rs b/ext/canvas/lib.rs index 72173f1331..defb288ac9 100644 --- a/ext/canvas/lib.rs +++ b/ext/canvas/lib.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
-use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::ToJsBuffer; use image::imageops::FilterType; @@ -13,6 +11,14 @@ use serde::Deserialize; use serde::Serialize; use std::path::PathBuf; +#[derive(Debug, thiserror::Error)] +pub enum CanvasError { + #[error("Color type '{0:?}' not supported")] + UnsupportedColorType(ColorType), + #[error(transparent)] + Image(#[from] image::ImageError), +} + #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case")] enum ImageResizeQuality { @@ -43,7 +49,7 @@ struct ImageProcessArgs { fn op_image_process( #[buffer] buf: &[u8], #[serde] args: ImageProcessArgs, -) -> Result { +) -> ToJsBuffer { let view = RgbaImage::from_vec(args.width, args.height, buf.to_vec()).unwrap(); @@ -105,7 +111,7 @@ fn op_image_process( } } - Ok(image_out.to_vec().into()) + image_out.to_vec().into() } #[derive(Debug, Serialize)] @@ -117,17 +123,16 @@ struct DecodedPng { #[op2] #[serde] -fn op_image_decode_png(#[buffer] buf: &[u8]) -> Result { +fn op_image_decode_png( + #[buffer] buf: &[u8], +) -> Result { let png = image::codecs::png::PngDecoder::new(buf)?; let (width, height) = png.dimensions(); // TODO(@crowlKats): maybe use DynamicImage https://docs.rs/image/0.24.7/image/enum.DynamicImage.html ? if png.color_type() != ColorType::Rgba8 { - return Err(type_error(format!( - "Color type '{:?}' not supported", - png.color_type() - ))); + return Err(CanvasError::UnsupportedColorType(png.color_type())); } // read_image will assert that the buffer is the correct size, so we need to fill it with zeros diff --git a/runtime/errors.rs b/runtime/errors.rs index 59928965b0..daec653d1e 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -11,6 +11,7 @@ use deno_broadcast_channel::BroadcastChannelError; use deno_cache::CacheError; +use deno_canvas::CanvasError; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url; @@ -155,6 +156,13 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_canvas_error(e: &CanvasError) -> &'static str { + match e { + CanvasError::UnsupportedColorType(_) => "TypeError", + CanvasError::Image(_) => "Error", + } +} + pub fn get_cache_error(error: &CacheError) -> &'static str { match error { CacheError::Sqlite(_) => "Error", @@ -186,6 +194,8 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::().map(get_canvas_error)) .or_else(|| e.downcast_ref::().map(get_cache_error)) .or_else(|| { e.downcast_ref::() From 7a990d9d42b633ca70eca235df0f9ba09f876720 Mon Sep 17 00:00:00 2001 From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> Date: Sat, 12 Oct 2024 12:14:32 -0700 Subject: [PATCH 04/52] feat(npm): support `--allow-scripts` on `deno run` (and `deno add`, `deno test`, etc) (#26075) Fixes https://github.com/denoland/deno/issues/25533. Fixes https://github.com/denoland/deno/issues/25396. Previously we only supported it on `deno install` and `deno cache`, which is annoying if you're using `nodeModulesDir: auto`. Also changes from printing output of lifecycle scripts directly to capturing the output and only printing it on error. 
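As a usage sketch (not part of the patch itself; the `@denotest/node-lifecycle-scripts` specifier below is the test
fixture package used later in this series and only stands in for any npm dependency with lifecycle scripts), the flag
can now be passed straight to `deno run` instead of needing a separate install/cache step:

    deno run --allow-scripts=npm:@denotest/node-lifecycle-scripts main.js

    # previously the scripts had to be allowed up front, e.g.:
    deno cache --allow-scripts=npm:@denotest/node-lifecycle-scripts main.js
    deno run main.js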
--- cli/args/flags.rs | 60 ++++++++---- .../resolvers/common/lifecycle_scripts.rs | 37 +++++++- cli/npm/managed/resolvers/local.rs | 1 + cli/task_runner.rs | 93 +++++++++++++++++-- cli/tools/task.rs | 26 +++--- .../better-say-hello/1.0.0/say-hello.js | 2 +- .../node-lifecycle-scripts/1.0.0/index.js | 4 +- .../node-lifecycle-scripts/1.0.0/package.json | 1 + .../@denotest/say-hello/1.0.0/say-hello.js | 5 +- .../install/install_entrypoint/lifecycle.out | 17 +--- .../npm/lifecycle_scripts/__test__.jsonc | 34 +++++-- .../npm/lifecycle_scripts/all_lifecycles.out | 17 +--- .../all_lifecycles_deno_run.out | 4 + .../all_lifecycles_not_run.out | 1 + .../npm/lifecycle_scripts/conflicting_bin.out | 3 +- .../lifecycle_scripts/conflicting_bin2.out | 1 + .../future_install_all_lifecycles.out | 18 +--- .../npm/lifecycle_scripts/no_deno_json.out | 5 +- .../lifecycle_scripts/node_gyp_not_found.out | 5 +- .../lifecycle_scripts/only_warns_first1.out | 3 +- .../lifecycle_scripts/only_warns_first2.out | 4 +- 21 files changed, 235 insertions(+), 106 deletions(-) create mode 100644 tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out create mode 100644 tests/specs/npm/lifecycle_scripts/conflicting_bin2.out diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 258712ca92..d59e5ac1ac 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -1342,7 +1342,7 @@ pub fn flags_from_vec(args: Vec) -> clap::error::Result { } match subcommand.as_str() { - "add" => add_parse(&mut flags, &mut m), + "add" => add_parse(&mut flags, &mut m)?, "remove" => remove_parse(&mut flags, &mut m), "bench" => bench_parse(&mut flags, &mut m)?, "bundle" => bundle_parse(&mut flags, &mut m), @@ -1675,6 +1675,7 @@ You can add multiple dependencies at once: .action(ArgAction::Append), ) .arg(add_dev_arg()) + .arg(allow_scripts_arg()) }) } @@ -1717,7 +1718,7 @@ If you specify a directory instead of a file, the path is expanded to all contai UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, true, false) + runtime_args(cmd, true, false, true) .arg(check_arg(true)) .arg( Arg::new("json") @@ -1881,7 +1882,7 @@ On the first invocation with deno will download the proper binary and cache it i UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, true, false) + runtime_args(cmd, true, false, true) .arg(check_arg(true)) .arg( Arg::new("include") @@ -2202,7 +2203,7 @@ This command has implicit access to all permissions. UnstableArgsConfig::ResolutionAndRuntime, ) .defer(|cmd| { - runtime_args(cmd, false, true) + runtime_args(cmd, false, true, true) .arg(check_arg(false)) .arg(executable_ext_arg()) .arg( @@ -2501,7 +2502,7 @@ The installation root is determined, in order of precedence: These must be added to the path manually if required."), UnstableArgsConfig::ResolutionAndRuntime) .visible_alias("i") .defer(|cmd| { - permission_args(runtime_args(cmd, false, true), Some("global")) + permission_args(runtime_args(cmd, false, true, false), Some("global")) .arg(check_arg(true)) .arg(allow_scripts_arg()) .arg( @@ -2767,7 +2768,7 @@ It is especially useful for quick prototyping and checking snippets of code. TypeScript is supported, however it is not type-checked, only transpiled." ), UnstableArgsConfig::ResolutionAndRuntime) - .defer(|cmd| runtime_args(cmd, true, true) + .defer(|cmd| runtime_args(cmd, true, true, true) .arg(check_arg(false)) .arg( Arg::new("eval-file") @@ -2799,7 +2800,7 @@ TypeScript is supported, however it is not type-checked, only transpiled." 
} fn run_args(command: Command, top_level: bool) -> Command { - runtime_args(command, true, true) + runtime_args(command, true, true, true) .arg(check_arg(false)) .arg(watch_arg(true)) .arg(hmr_arg(true)) @@ -2855,7 +2856,7 @@ Start a server defined in server.ts: Start a server defined in server.ts, watching for changes and running on port 5050: deno serve --watch --port 5050 server.ts -Read more: https://docs.deno.com/go/serve"), UnstableArgsConfig::ResolutionAndRuntime), true, true) +Read more: https://docs.deno.com/go/serve"), UnstableArgsConfig::ResolutionAndRuntime), true, true, true) .arg( Arg::new("port") .long("port") @@ -2929,7 +2930,7 @@ or **/__tests__/**: UnstableArgsConfig::ResolutionAndRuntime ) .defer(|cmd| - runtime_args(cmd, true, true) + runtime_args(cmd, true, true, true) .arg(check_arg(true)) .arg( Arg::new("ignore") @@ -3642,6 +3643,7 @@ fn runtime_args( app: Command, include_perms: bool, include_inspector: bool, + include_allow_scripts: bool, ) -> Command { let app = compile_args(app); let app = if include_perms { @@ -3654,6 +3656,11 @@ fn runtime_args( } else { app }; + let app = if include_allow_scripts { + app.arg(allow_scripts_arg()) + } else { + app + }; app .arg(frozen_lockfile_arg()) .arg(cached_only_arg()) @@ -4235,8 +4242,13 @@ fn allow_scripts_arg_parse( Ok(()) } -fn add_parse(flags: &mut Flags, matches: &mut ArgMatches) { +fn add_parse( + flags: &mut Flags, + matches: &mut ArgMatches, +) -> clap::error::Result<()> { + allow_scripts_arg_parse(flags, matches)?; flags.subcommand = DenoSubcommand::Add(add_parse_inner(matches, None)); + Ok(()) } fn add_parse_inner( @@ -4262,7 +4274,7 @@ fn bench_parse( ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, false)?; + runtime_args_parse(flags, matches, true, false, true)?; ext_arg_parse(flags, matches); // NOTE: `deno bench` always uses `--no-prompt`, tests shouldn't ever do @@ -4352,7 +4364,7 @@ fn compile_parse( matches: &mut ArgMatches, ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, false)?; + runtime_args_parse(flags, matches, true, false, true)?; let mut script = matches.remove_many::("script_arg").unwrap(); let source_file = script.next().unwrap(); @@ -4527,7 +4539,7 @@ fn eval_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, false, true)?; + runtime_args_parse(flags, matches, false, true, false)?; unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); flags.allow_all(); @@ -4620,7 +4632,7 @@ fn install_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, false)?; let global = matches.get_flag("global"); if global { @@ -4846,7 +4858,7 @@ fn repl_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; unsafely_ignore_certificate_errors_parse(flags, matches); let eval_files = matches @@ -4879,7 +4891,7 @@ fn run_parse( mut app: Command, bare: bool, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; ext_arg_parse(flags, matches); flags.code_cache_enabled = !matches.get_flag("no-code-cache"); @@ -4920,7 +4932,7 @@ fn serve_parse( let 
worker_count = parallel_arg_parse(matches).map(|v| v.get()); - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; // If the user didn't pass --allow-net, add this port to the network // allowlist. If the host is 0.0.0.0, we add :{port} and allow the same network perms // as if it was passed to --allow-net directly. @@ -5015,7 +5027,7 @@ fn test_parse( matches: &mut ArgMatches, ) -> clap::error::Result<()> { flags.type_check_mode = TypeCheckMode::Local; - runtime_args_parse(flags, matches, true, true)?; + runtime_args_parse(flags, matches, true, true, true)?; ext_arg_parse(flags, matches); // NOTE: `deno test` always uses `--no-prompt`, tests shouldn't ever do @@ -5380,6 +5392,7 @@ fn runtime_args_parse( matches: &mut ArgMatches, include_perms: bool, include_inspector: bool, + include_allow_scripts: bool, ) -> clap::error::Result<()> { unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); compile_args_parse(flags, matches)?; @@ -5391,6 +5404,9 @@ fn runtime_args_parse( if include_inspector { inspect_arg_parse(flags, matches); } + if include_allow_scripts { + allow_scripts_arg_parse(flags, matches)?; + } location_arg_parse(flags, matches); v8_flags_arg_parse(flags, matches); seed_arg_parse(flags, matches); @@ -8862,8 +8878,12 @@ mod tests { #[test] fn test_no_colon_in_value_name() { - let app = - runtime_args(Command::new("test_inspect_completion_value"), true, true); + let app = runtime_args( + Command::new("test_inspect_completion_value"), + true, + true, + false, + ); let inspect_args = app .get_arguments() .filter(|arg| arg.get_id() == "inspect") diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs index b358c35856..5735f52482 100644 --- a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -2,6 +2,8 @@ use super::bin_entries::BinEntries; use crate::args::LifecycleScriptsConfig; +use crate::task_runner::TaskStdio; +use crate::util::progress_bar::ProgressBar; use deno_core::anyhow::Context; use deno_npm::resolution::NpmResolutionSnapshot; use deno_runtime::deno_io::FromRawIoHandle; @@ -148,6 +150,7 @@ impl<'a> LifecycleScripts<'a> { snapshot: &NpmResolutionSnapshot, packages: &[NpmResolutionPackage], root_node_modules_dir_path: Option<&Path>, + progress_bar: &ProgressBar, ) -> Result<(), AnyError> { self.warn_not_run_scripts()?; let get_package_path = @@ -201,7 +204,15 @@ impl<'a> LifecycleScripts<'a> { { continue; } - let exit_code = crate::task_runner::run_task( + let _guard = progress_bar.update_with_prompt( + crate::util::progress_bar::ProgressMessagePrompt::Initialize, + &format!("{}: running '{script_name}' script", package.id.nv), + ); + let crate::task_runner::TaskResult { + exit_code, + stderr, + stdout, + } = crate::task_runner::run_task( crate::task_runner::RunTaskOptions { task_name: script_name, script, @@ -211,15 +222,37 @@ impl<'a> LifecycleScripts<'a> { init_cwd, argv: &[], root_node_modules_dir: root_node_modules_dir_path, + stdio: Some(crate::task_runner::TaskIo { + stderr: TaskStdio::piped(), + stdout: TaskStdio::piped(), + }), }, ) .await?; + let stdout = stdout.unwrap(); + let stderr = stderr.unwrap(); if exit_code != 0 { log::warn!( - "error: script '{}' in '{}' failed with exit code {}", + "error: script '{}' in '{}' failed with exit code {}{}{}", script_name, package.id.nv, exit_code, + if !stdout.trim_ascii().is_empty() { + format!( + "\nstdout:\n{}\n", 
+ String::from_utf8_lossy(&stdout).trim() + ) + } else { + String::new() + }, + if !stderr.trim_ascii().is_empty() { + format!( + "\nstderr:\n{}\n", + String::from_utf8_lossy(&stderr).trim() + ) + } else { + String::new() + }, ); failed_packages.push(&package.id.nv); // assume if earlier script fails, later ones will fail too diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 63a972a431..54f7576ade 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -713,6 +713,7 @@ async fn sync_resolution_with_fs( snapshot, &package_partitions.packages, Some(root_node_modules_dir_path), + progress_bar, ) .await?; diff --git a/cli/task_runner.rs b/cli/task_runner.rs index ab7163bc93..418043b23f 100644 --- a/cli/task_runner.rs +++ b/cli/task_runner.rs @@ -16,8 +16,11 @@ use deno_task_shell::ExecutableCommand; use deno_task_shell::ExecuteResult; use deno_task_shell::ShellCommand; use deno_task_shell::ShellCommandContext; +use deno_task_shell::ShellPipeReader; +use deno_task_shell::ShellPipeWriter; use lazy_regex::Lazy; use regex::Regex; +use tokio::task::JoinHandle; use tokio::task::LocalSet; use crate::npm::CliNpmResolver; @@ -36,6 +39,35 @@ pub fn get_script_with_args(script: &str, argv: &[String]) -> String { script.trim().to_owned() } +pub struct TaskStdio(Option, ShellPipeWriter); + +impl TaskStdio { + pub fn stdout() -> Self { + Self(None, ShellPipeWriter::stdout()) + } + pub fn stderr() -> Self { + Self(None, ShellPipeWriter::stderr()) + } + pub fn piped() -> Self { + let (r, w) = deno_task_shell::pipe(); + Self(Some(r), w) + } +} + +pub struct TaskIo { + pub stdout: TaskStdio, + pub stderr: TaskStdio, +} + +impl Default for TaskIo { + fn default() -> Self { + Self { + stderr: TaskStdio::stderr(), + stdout: TaskStdio::stdout(), + } + } +} + pub struct RunTaskOptions<'a> { pub task_name: &'a str, pub script: &'a str, @@ -45,24 +77,69 @@ pub struct RunTaskOptions<'a> { pub argv: &'a [String], pub custom_commands: HashMap>, pub root_node_modules_dir: Option<&'a Path>, + pub stdio: Option, } pub type TaskCustomCommands = HashMap>; -pub async fn run_task(opts: RunTaskOptions<'_>) -> Result { +pub struct TaskResult { + pub exit_code: i32, + pub stdout: Option>, + pub stderr: Option>, +} + +pub async fn run_task( + opts: RunTaskOptions<'_>, +) -> Result { let script = get_script_with_args(opts.script, opts.argv); let seq_list = deno_task_shell::parser::parse(&script) .with_context(|| format!("Error parsing script '{}'.", opts.task_name))?; let env_vars = prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir); + let state = + deno_task_shell::ShellState::new(env_vars, opts.cwd, opts.custom_commands); + let stdio = opts.stdio.unwrap_or_default(); + let ( + TaskStdio(stdout_read, stdout_write), + TaskStdio(stderr_read, stderr_write), + ) = (stdio.stdout, stdio.stderr); + + fn read(reader: ShellPipeReader) -> JoinHandle, AnyError>> { + tokio::task::spawn_blocking(move || { + let mut buf = Vec::new(); + reader.pipe_to(&mut buf)?; + Ok(buf) + }) + } + + let stdout = stdout_read.map(read); + let stderr = stderr_read.map(read); + let local = LocalSet::new(); - let future = deno_task_shell::execute( - seq_list, - env_vars, - opts.cwd, - opts.custom_commands, - ); - Ok(local.run_until(future).await) + let future = async move { + let exit_code = deno_task_shell::execute_with_pipes( + seq_list, + state, + ShellPipeReader::stdin(), + stdout_write, + stderr_write, + ) + .await; + Ok::<_, AnyError>(TaskResult { + exit_code, 
+ stdout: if let Some(stdout) = stdout { + Some(stdout.await??) + } else { + None + }, + stderr: if let Some(stderr) = stderr { + Some(stderr.await??) + } else { + None + }, + }) + }; + local.run_until(future).await } fn prepare_env_vars( diff --git a/cli/tools/task.rs b/cli/tools/task.rs index 464b65d984..502b09d2c2 100644 --- a/cli/tools/task.rs +++ b/cli/tools/task.rs @@ -182,17 +182,21 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result { &task_runner::get_script_with_args(script, cli_options.argv()), ); - task_runner::run_task(task_runner::RunTaskOptions { - task_name, - script, - cwd, - env_vars, - custom_commands, - init_cwd: opts.cli_options.initial_cwd(), - argv: cli_options.argv(), - root_node_modules_dir: npm_resolver.root_node_modules_path(), - }) - .await + Ok( + task_runner::run_task(task_runner::RunTaskOptions { + task_name, + script, + cwd, + env_vars, + custom_commands, + init_cwd: opts.cli_options.initial_cwd(), + argv: cli_options.argv(), + root_node_modules_dir: npm_resolver.root_node_modules_path(), + stdio: None, + }) + .await? + .exit_code, + ) } fn output_task(task_name: &str, script: &str) { diff --git a/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js b/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js index 0b8d63cf48..5243d49026 100644 --- a/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js +++ b/tests/registry/npm/@denotest/better-say-hello/1.0.0/say-hello.js @@ -1,2 +1,2 @@ import { sayBetterHello } from "./index.js"; -sayBetterHello(); \ No newline at end of file +sayBetterHello(); diff --git a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js index 4eb9b107a0..242fe92f57 100644 --- a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js +++ b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/index.js @@ -1,5 +1,3 @@ -modules.export = { +module.exports = { value: 42 }; - -console.log('index.js', modules.export.value); \ No newline at end of file diff --git a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json index 3c6fa005fe..085ab6414a 100644 --- a/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json +++ b/tests/registry/npm/@denotest/node-lifecycle-scripts/1.0.0/package.json @@ -6,6 +6,7 @@ "install": "echo install && cli-esm 'hello from install script'", "postinstall": "echo postinstall && npx cowsay postinstall" }, + "exports": "./index.js", "dependencies": { "@denotest/bin": "1.0.0" } diff --git a/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js b/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js index 8751b8e47b..66d02cde56 100644 --- a/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js +++ b/tests/registry/npm/@denotest/say-hello/1.0.0/say-hello.js @@ -1,2 +1,5 @@ import { sayHello } from "./index.js"; -console.log(sayHello()); \ No newline at end of file +console.log(sayHello()); +import path from "node:path"; +import fs from "node:fs"; +fs.writeSync(fs.openSync(path.join(process.env.INIT_CWD, "say-hello-output.txt"), "w"), sayHello()); diff --git a/tests/specs/install/install_entrypoint/lifecycle.out b/tests/specs/install/install_entrypoint/lifecycle.out index 8eae8ee120..2fd2798754 100644 --- a/tests/specs/install/install_entrypoint/lifecycle.out +++ b/tests/specs/install/install_entrypoint/lifecycle.out @@ -5,18 +5,7 @@ Download 
http://localhost:4260/@denotest/node-lifecycle-scripts/1.0.0.tgz Download http://localhost:4260/@denotest/bin/1.0.0.tgz Initialize @denotest/node-lifecycle-scripts@1.0.0 Initialize @denotest/bin@1.0.0 +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script [UNORDERED_END] -preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || diff --git a/tests/specs/npm/lifecycle_scripts/__test__.jsonc b/tests/specs/npm/lifecycle_scripts/__test__.jsonc index b798b968ff..589c131dda 100644 --- a/tests/specs/npm/lifecycle_scripts/__test__.jsonc +++ b/tests/specs/npm/lifecycle_scripts/__test__.jsonc @@ -5,7 +5,7 @@ "steps": [ { "args": "cache --allow-scripts=npm:@denotest/node-addon main.js", - "output": "[WILDCARD]gyp info ok \n" + "output": "[WILDCARD]Initialize @denotest/node-addon@1.0.0: running 'install' script\n" }, { "args": "run -A main.js", @@ -38,7 +38,7 @@ "steps": [ { // without running scripts (should warn) - "args": "cache all_lifecycles.js", + "args": "run all_lifecycles.js", "output": "all_lifecycles_not_run.out" }, { @@ -51,6 +51,23 @@ } ] }, + "deno_run_lifecycle_scripts": { + "steps": [ + { + // without running scripts (should warn) + "args": "run all_lifecycles.js", + "output": "all_lifecycles_not_run.out" + }, + { + // now run scripts + "args": "run --allow-scripts all_lifecycles.js", + // this test package covers running preinstall, install, and postinstall scripts + // it also exercises running bin packages (esbuild in this case), using `node` in a script + // (with and without node-only CLI flags), and using `npx` in a script + "output": "all_lifecycles_deno_run.out" + } + ] + }, "global_lifecycle_scripts": { "steps": [ { @@ -79,14 +96,12 @@ { // without running scripts (should warn) "args": "run all_lifecycles.js", - "output": "only_warns_first1.out", - "exitCode": 1 + "output": "only_warns_first1.out" }, { // without running scripts (does not warn) "args": "run all_lifecycles.js", - "output": "only_warns_first2.out", - "exitCode": 1 + "output": "only_warns_first2.out" }, { // should warn because this is an explicit install @@ -128,6 +143,13 @@ // we run the install script, we should use the correct binary (relative to the package) "args": "cache --allow-scripts conflicting_bin.js", "output": "conflicting_bin.out" + }, + { + "args": [ + "eval", + "console.log(Deno.readTextFileSync('./say-hello-output.txt'))" + ], + "output": "conflicting_bin2.out" } ] }, diff --git a/tests/specs/npm/lifecycle_scripts/all_lifecycles.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles.out index 956c006ddc..bdbb2b08ea 100644 --- a/tests/specs/npm/lifecycle_scripts/all_lifecycles.out +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles.out @@ -1,14 +1,3 @@ -preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script diff --git 
a/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out new file mode 100644 index 0000000000..5f06f6214c --- /dev/null +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles_deno_run.out @@ -0,0 +1,4 @@ +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out b/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out index 09324c8452..cdfdeabe24 100644 --- a/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out +++ b/tests/specs/npm/lifecycle_scripts/all_lifecycles_not_run.out @@ -12,3 +12,4 @@ Warning The following packages contained npm lifecycle scripts (preinstall/insta ┠─ This may cause the packages to not work correctly. ┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: deno install --allow-scripts=npm:@denotest/node-lifecycle-scripts@1.0.0 +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/conflicting_bin.out b/tests/specs/npm/lifecycle_scripts/conflicting_bin.out index c70aafdd53..bae5275dca 100644 --- a/tests/specs/npm/lifecycle_scripts/conflicting_bin.out +++ b/tests/specs/npm/lifecycle_scripts/conflicting_bin.out @@ -8,6 +8,5 @@ Download http://localhost:4260/@denotest/say-hello/1.0.0.tgz Initialize @denotest/better-say-hello@1.0.0 Initialize @denotest/say-hello-on-install@1.0.0 Initialize @denotest/say-hello@1.0.0 +Initialize @denotest/say-hello-on-install@1.0.0: running 'install' script [UNORDERED_END] -install script -@denotest/say-hello says hello! diff --git a/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out b/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out new file mode 100644 index 0000000000..b4640b02c5 --- /dev/null +++ b/tests/specs/npm/lifecycle_scripts/conflicting_bin2.out @@ -0,0 +1 @@ +@denotest/say-hello says hello! 
diff --git a/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out b/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out index f62079d3fe..bdbb2b08ea 100644 --- a/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out +++ b/tests/specs/npm/lifecycle_scripts/future_install_all_lifecycles.out @@ -1,15 +1,3 @@ -preinstall -deno preinstall.js -node preinstall.js -install -hello from install script -postinstall -[WILDCARD] - _____________ -< postinstall > - ------------- - \ ^__^ - \ (oo)\_______ - (__)\ )\/\ - ||----w | - || || +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'preinstall' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'install' script +Initialize @denotest/node-lifecycle-scripts@1.0.0: running 'postinstall' script diff --git a/tests/specs/npm/lifecycle_scripts/no_deno_json.out b/tests/specs/npm/lifecycle_scripts/no_deno_json.out index 38a4614494..8509d8f9f5 100644 --- a/tests/specs/npm/lifecycle_scripts/no_deno_json.out +++ b/tests/specs/npm/lifecycle_scripts/no_deno_json.out @@ -5,7 +5,6 @@ Download http://localhost:4260/@denotest/lifecycle-scripts-cjs/1.0.0.tgz Download http://localhost:4260/@denotest/bin/1.0.0.tgz Initialize @denotest/lifecycle-scripts-cjs@1.0.0 Initialize @denotest/bin@1.0.0 +Initialize @denotest/lifecycle-scripts-cjs@1.0.0: running 'preinstall' script +Initialize @denotest/lifecycle-scripts-cjs@1.0.0: running 'install' script [UNORDERED_END] -preinstall -install -hello from install script diff --git a/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out b/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out index 06c856cd94..81577e6ba0 100644 --- a/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out +++ b/tests/specs/npm/lifecycle_scripts/node_gyp_not_found.out @@ -2,8 +2,11 @@ Download http://localhost:4260/@denotest/node-addon-implicit-node-gyp Download http://localhost:4260/@denotest/node-addon-implicit-node-gyp/1.0.0.tgz Initialize @denotest/node-addon-implicit-node-gyp@1.0.0 +Initialize @denotest/node-addon-implicit-node-gyp@1.0.0: running 'install' script [UNORDERED_END] Warning node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`) -[WILDCARD] error: script 'install' in '@denotest/node-addon-implicit-node-gyp@1.0.0' failed with exit code 1 +stderr: +Error launching 'node-gyp': [WILDCARD] + error: failed to run scripts for packages: @denotest/node-addon-implicit-node-gyp@1.0.0 diff --git a/tests/specs/npm/lifecycle_scripts/only_warns_first1.out b/tests/specs/npm/lifecycle_scripts/only_warns_first1.out index 947777b5ba..cdfdeabe24 100644 --- a/tests/specs/npm/lifecycle_scripts/only_warns_first1.out +++ b/tests/specs/npm/lifecycle_scripts/only_warns_first1.out @@ -12,5 +12,4 @@ Warning The following packages contained npm lifecycle scripts (preinstall/insta ┠─ This may cause the packages to not work correctly. 
┖─ To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`: deno install --allow-scripts=npm:@denotest/node-lifecycle-scripts@1.0.0 -error: Uncaught SyntaxError: The requested module 'npm:@denotest/node-lifecycle-scripts' does not provide an export named 'value' -[WILDCARD] +value is 42 diff --git a/tests/specs/npm/lifecycle_scripts/only_warns_first2.out b/tests/specs/npm/lifecycle_scripts/only_warns_first2.out index f6a02c7279..9e78061598 100644 --- a/tests/specs/npm/lifecycle_scripts/only_warns_first2.out +++ b/tests/specs/npm/lifecycle_scripts/only_warns_first2.out @@ -1,3 +1 @@ -[# note no warning] -error: Uncaught SyntaxError: The requested module 'npm:@denotest/node-lifecycle-scripts' does not provide an export named 'value' -[WILDCARD] +value is 42 From 4f89225f76ca67e8a6385bbc52a73c284cc3402c Mon Sep 17 00:00:00 2001 From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> Date: Sat, 12 Oct 2024 12:36:23 -0700 Subject: [PATCH 05/52] fix(node/util): export `styleText` from `node:util` (#26194) Fixes #26184. It was added but not publicly exported. --- ext/node/polyfills/util.ts | 3 +++ tests/unit_node/util_test.ts | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/ext/node/polyfills/util.ts b/ext/node/polyfills/util.ts index 586fae17e9..d82b288b03 100644 --- a/ext/node/polyfills/util.ts +++ b/ext/node/polyfills/util.ts @@ -39,6 +39,7 @@ import { formatWithOptions, inspect, stripVTControlCharacters, + styleText, } from "ext:deno_node/internal/util/inspect.mjs"; import { codes } from "ext:deno_node/internal/error_codes.ts"; import types from "node:util/types"; @@ -63,6 +64,7 @@ export { parseArgs, promisify, stripVTControlCharacters, + styleText, types, }; @@ -354,4 +356,5 @@ export default { debuglog, debug: debuglog, isDeepStrictEqual, + styleText, }; diff --git a/tests/unit_node/util_test.ts b/tests/unit_node/util_test.ts index edd5002623..a47c231a18 100644 --- a/tests/unit_node/util_test.ts +++ b/tests/unit_node/util_test.ts @@ -348,3 +348,8 @@ Deno.test("[util] aborted()", async () => { await promise; assertEquals(done, true); }); + +Deno.test("[util] styleText()", () => { + const redText = util.styleText("red", "error"); + assertEquals(redText, "\x1B[31merror\x1B[39m"); +}); From 2ac699fe6e5b69f656046c5a9657542dc1cf2e9d Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sat, 12 Oct 2024 14:23:49 -0700 Subject: [PATCH 06/52] refactor(ext/cron): use concrete error type (#26135) --- Cargo.lock | 1 + ext/cron/Cargo.toml | 1 + ext/cron/interface.rs | 6 +++--- ext/cron/lib.rs | 42 +++++++++++++++++++++++++++++------------- ext/cron/local.rs | 31 ++++++++++++++++--------------- runtime/errors.rs | 19 ++++++++++++++++++- 6 files changed, 68 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2500b5a2cd..d1341189f0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1467,6 +1467,7 @@ dependencies = [ "chrono", "deno_core", "saffron", + "thiserror", "tokio", ] diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index a773521f5b..10f09b57cf 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -19,4 +19,5 @@ async-trait.workspace = true chrono = { workspace = true, features = ["now"] } deno_core.workspace = true saffron.workspace = true +thiserror.workspace = true tokio.workspace = true diff --git a/ext/cron/interface.rs b/ext/cron/interface.rs index 01b1d17895..a19525cc4e 100644 --- a/ext/cron/interface.rs +++ b/ext/cron/interface.rs @@ -1,17 +1,17 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use crate::CronError; use async_trait::async_trait; -use deno_core::error::AnyError; pub trait CronHandler { type EH: CronHandle + 'static; - fn create(&self, spec: CronSpec) -> Result; + fn create(&self, spec: CronSpec) -> Result; } #[async_trait(?Send)] pub trait CronHandle { - async fn next(&self, prev_success: bool) -> Result; + async fn next(&self, prev_success: bool) -> Result; fn close(&self); } diff --git a/ext/cron/lib.rs b/ext/cron/lib.rs index e350e4d698..feffb5e511 100644 --- a/ext/cron/lib.rs +++ b/ext/cron/lib.rs @@ -7,16 +7,13 @@ use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; +pub use crate::interface::*; use deno_core::error::get_custom_error_class; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; -pub use crate::interface::*; - pub const UNSTABLE_FEATURE_NAME: &str = "cron"; deno_core::extension!(deno_cron, @@ -49,6 +46,28 @@ impl Resource for CronResource { } } +#[derive(Debug, thiserror::Error)] +pub enum CronError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error("Cron name cannot exceed 64 characters: current length {0}")] + NameExceeded(usize), + #[error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")] + NameInvalid, + #[error("Cron with this name already exists")] + AlreadyExists, + #[error("Too many crons")] + TooManyCrons, + #[error("Invalid cron schedule")] + InvalidCron, + #[error("Invalid backoff schedule")] + InvalidBackoff, + #[error(transparent)] + AcquireError(#[from] tokio::sync::AcquireError), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + #[op2] #[smi] fn op_cron_create( @@ -56,7 +75,7 @@ fn op_cron_create( #[string] name: String, #[string] cron_schedule: String, #[serde] backoff_schedule: Option>, -) -> Result +) -> Result where C: CronHandler + 'static, { @@ -90,7 +109,7 @@ async fn op_cron_next( state: Rc>, #[smi] rid: ResourceId, prev_success: bool, -) -> Result +) -> Result where C: CronHandler + 'static, { @@ -102,7 +121,7 @@ where if get_custom_error_class(&err) == Some("BadResource") { return Ok(false); } else { - return Err(err); + return Err(CronError::Resource(err)); } } }; @@ -112,17 +131,14 @@ where cron_handler.next(prev_success).await } -fn validate_cron_name(name: &str) -> Result<(), AnyError> { +fn validate_cron_name(name: &str) -> Result<(), CronError> { if name.len() > 64 { - return Err(type_error(format!( - "Cron name cannot exceed 64 characters: current length {}", - name.len() - ))); + return Err(CronError::NameExceeded(name.len())); } if !name.chars().all(|c| { c.is_ascii_whitespace() || c.is_ascii_alphanumeric() || c == '_' || c == '-' }) { - return Err(type_error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")); + return Err(CronError::NameInvalid); } Ok(()) } diff --git a/ext/cron/local.rs b/ext/cron/local.rs index dd60e750a0..1110baadb8 100644 --- a/ext/cron/local.rs +++ b/ext/cron/local.rs @@ -10,8 +10,6 @@ use std::rc::Weak; use std::sync::Arc; use async_trait::async_trait; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::FutureExt; use deno_core::unsync::spawn; @@ -21,6 +19,7 @@ use tokio::sync::mpsc::WeakSender; use tokio::sync::OwnedSemaphorePermit; use tokio::sync::Semaphore; +use crate::CronError; use crate::CronHandle; use crate::CronHandler; use crate::CronSpec; @@ -81,7 +80,7 
@@ impl LocalCronHandler { async fn cron_loop( runtime_state: Rc>, mut cron_schedule_rx: mpsc::Receiver<(String, bool)>, - ) -> Result<(), AnyError> { + ) -> Result<(), CronError> { loop { let earliest_deadline = runtime_state .borrow() @@ -154,7 +153,7 @@ impl LocalCronHandler { impl RuntimeState { fn get_ready_crons( &mut self, - ) -> Result)>, AnyError> { + ) -> Result)>, CronError> { let now = chrono::Utc::now().timestamp_millis() as u64; let ready = { @@ -191,7 +190,7 @@ impl RuntimeState { impl CronHandler for LocalCronHandler { type EH = CronExecutionHandle; - fn create(&self, spec: CronSpec) -> Result { + fn create(&self, spec: CronSpec) -> Result { // Ensure that the cron loop is started. self.cron_loop_join_handle.get_or_init(|| { let (cron_schedule_tx, cron_schedule_rx) = @@ -208,17 +207,17 @@ impl CronHandler for LocalCronHandler { let mut runtime_state = self.runtime_state.borrow_mut(); if runtime_state.crons.len() > MAX_CRONS { - return Err(type_error("Too many crons")); + return Err(CronError::TooManyCrons); } if runtime_state.crons.contains_key(&spec.name) { - return Err(type_error("Cron with this name already exists")); + return Err(CronError::AlreadyExists); } // Validate schedule expression. spec .cron_schedule .parse::() - .map_err(|_| type_error("Invalid cron schedule"))?; + .map_err(|_| CronError::InvalidCron)?; // Validate backoff_schedule. if let Some(backoff_schedule) = &spec.backoff_schedule { @@ -263,7 +262,7 @@ struct Inner { #[async_trait(?Send)] impl CronHandle for CronExecutionHandle { - async fn next(&self, prev_success: bool) -> Result { + async fn next(&self, prev_success: bool) -> Result { self.inner.borrow_mut().permit.take(); if self @@ -300,7 +299,7 @@ impl CronHandle for CronExecutionHandle { } } -fn compute_next_deadline(cron_expression: &str) -> Result { +fn compute_next_deadline(cron_expression: &str) -> Result { let now = chrono::Utc::now(); if let Ok(test_schedule) = env::var("DENO_CRON_TEST_SCHEDULE_OFFSET") { @@ -311,19 +310,21 @@ fn compute_next_deadline(cron_expression: &str) -> Result { let cron = cron_expression .parse::() - .map_err(|_| anyhow::anyhow!("invalid cron expression"))?; + .map_err(|_| CronError::InvalidCron)?; let Some(next_deadline) = cron.next_after(now) else { - return Err(anyhow::anyhow!("invalid cron expression")); + return Err(CronError::InvalidCron); }; Ok(next_deadline.timestamp_millis() as u64) } -fn validate_backoff_schedule(backoff_schedule: &[u32]) -> Result<(), AnyError> { +fn validate_backoff_schedule( + backoff_schedule: &[u32], +) -> Result<(), CronError> { if backoff_schedule.len() > MAX_BACKOFF_COUNT { - return Err(type_error("Invalid backoff schedule")); + return Err(CronError::InvalidBackoff); } if backoff_schedule.iter().any(|s| *s > MAX_BACKOFF_MS) { - return Err(type_error("Invalid backoff schedule")); + return Err(CronError::InvalidBackoff); } Ok(()) } diff --git a/runtime/errors.rs b/runtime/errors.rs index daec653d1e..51d6d96fa3 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -16,6 +16,7 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url; use deno_core::ModuleResolutionError; +use deno_cron::CronError; use std::env; use std::error::Error; use std::io; @@ -156,6 +157,22 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +pub fn get_cron_error_class(e: &CronError) -> &'static str { + match e { + CronError::Resource(e) => { + deno_core::error::get_custom_error_class(e).unwrap_or("Error") + } + CronError::NameExceeded(_) => "TypeError", + 
CronError::NameInvalid => "TypeError", + CronError::AlreadyExists => "TypeError", + CronError::TooManyCrons => "TypeError", + CronError::InvalidCron => "TypeError", + CronError::InvalidBackoff => "TypeError", + CronError::AcquireError(_) => "Error", + CronError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + fn get_canvas_error(e: &CanvasError) -> &'static str { match e { CanvasError::UnsupportedColorType(_) => "TypeError", @@ -194,7 +211,7 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) - .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::().map(get_cron_error_class)) .or_else(|| e.downcast_ref::().map(get_canvas_error)) .or_else(|| e.downcast_ref::().map(get_cache_error)) .or_else(|| { From 64c304a45265705832ebb4ab4e9ef19f899ac911 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Sat, 12 Oct 2024 16:53:38 -0700 Subject: [PATCH 07/52] refactor(ext/tls): use concrete error types (#26174) --- Cargo.lock | 1 + ext/net/ops_tls.rs | 2 +- ext/tls/Cargo.toml | 1 + ext/tls/lib.rs | 87 ++++++++++++++++++++++---------------------- ext/tls/tls_key.rs | 28 +++++++++----- ext/websocket/lib.rs | 1 + runtime/errors.rs | 13 +++++++ 7 files changed, 79 insertions(+), 54 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d1341189f0..c6297d0b56 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2117,6 +2117,7 @@ dependencies = [ "rustls-tokio-stream", "rustls-webpki", "serde", + "thiserror", "tokio", "webpki-roots", ] diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index 5bc04ceb51..a68d144b57 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -250,7 +250,7 @@ pub fn op_tls_cert_resolver_resolve_error( #[string] sni: String, #[string] error: String, ) { - lookup.resolve(sni, Err(anyhow!(error))) + lookup.resolve(sni, Err(error)) } #[op2] diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 6c1554241e..9f7bffe67c 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -21,5 +21,6 @@ rustls-pemfile.workspace = true rustls-tokio-stream.workspace = true rustls-webpki.workspace = true serde.workspace = true +thiserror.workspace = true tokio.workspace = true webpki-roots.workspace = true diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index c4d548ccf2..883d2995e4 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -9,17 +9,12 @@ pub use rustls_tokio_stream::*; pub use webpki; pub use webpki_roots; -use deno_core::anyhow::anyhow; -use deno_core::error::custom_error; -use deno_core::error::AnyError; - use rustls::client::danger::HandshakeSignatureValid; use rustls::client::danger::ServerCertVerified; use rustls::client::danger::ServerCertVerifier; use rustls::client::WebPkiServerVerifier; use rustls::ClientConfig; use rustls::DigitallySignedStruct; -use rustls::Error; use rustls::RootCertStore; use rustls_pemfile::certs; use rustls_pemfile::ec_private_keys; @@ -35,12 +30,30 @@ use std::sync::Arc; mod tls_key; pub use tls_key::*; +#[derive(Debug, thiserror::Error)] +pub enum TlsError { + #[error(transparent)] + Rustls(#[from] rustls::Error), + #[error("Unable to add pem file to certificate store: {0}")] + UnableAddPemFileToCert(std::io::Error), + #[error("Unable to decode certificate")] + CertInvalid, + #[error("No certificates found in certificate data")] + CertsNotFound, + #[error("No keys found in key data")] + KeysNotFound, + 
#[error("Unable to decode key")] + KeyDecode, +} + /// Lazily resolves the root cert store. /// /// This was done because the root cert store is not needed in all cases /// and takes a bit of time to initialize. pub trait RootCertStoreProvider: Send + Sync { - fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError>; + fn get_or_try_init( + &self, + ) -> Result<&RootCertStore, deno_core::error::AnyError>; } // This extension has no runtime apis, it only exports some shared native functions. @@ -77,7 +90,7 @@ impl ServerCertVerifier for NoCertificateVerification { server_name: &rustls::pki_types::ServerName<'_>, ocsp_response: &[u8], now: rustls::pki_types::UnixTime, - ) -> Result { + ) -> Result { if self.ic_allowlist.is_empty() { return Ok(ServerCertVerified::assertion()); } @@ -89,7 +102,9 @@ impl ServerCertVerifier for NoCertificateVerification { _ => { // NOTE(bartlomieju): `ServerName` is a non-exhaustive enum // so we have this catch all errors here. - return Err(Error::General("Unknown `ServerName` variant".to_string())); + return Err(rustls::Error::General( + "Unknown `ServerName` variant".to_string(), + )); } }; if self.ic_allowlist.contains(&dns_name_or_ip_address) { @@ -110,7 +125,7 @@ impl ServerCertVerifier for NoCertificateVerification { message: &[u8], cert: &rustls::pki_types::CertificateDer, dss: &DigitallySignedStruct, - ) -> Result { + ) -> Result { if self.ic_allowlist.is_empty() { return Ok(HandshakeSignatureValid::assertion()); } @@ -126,7 +141,7 @@ impl ServerCertVerifier for NoCertificateVerification { message: &[u8], cert: &rustls::pki_types::CertificateDer, dss: &DigitallySignedStruct, - ) -> Result { + ) -> Result { if self.ic_allowlist.is_empty() { return Ok(HandshakeSignatureValid::assertion()); } @@ -178,7 +193,7 @@ pub fn create_client_config( unsafely_ignore_certificate_errors: Option>, maybe_cert_chain_and_key: TlsKeys, socket_use: SocketUse, -) -> Result { +) -> Result { if let Some(ic_allowlist) = unsafely_ignore_certificate_errors { let client_config = ClientConfig::builder() .dangerous() @@ -214,10 +229,7 @@ pub fn create_client_config( root_cert_store.add(cert)?; } Err(e) => { - return Err(anyhow!( - "Unable to add pem file to certificate store: {}", - e - )); + return Err(TlsError::UnableAddPemFileToCert(e)); } } } @@ -255,74 +267,61 @@ fn add_alpn(client: &mut ClientConfig, socket_use: SocketUse) { pub fn load_certs( reader: &mut dyn BufRead, -) -> Result>, AnyError> { +) -> Result>, TlsError> { let certs: Result, _> = certs(reader).collect(); - let certs = certs - .map_err(|_| custom_error("InvalidData", "Unable to decode certificate"))?; + let certs = certs.map_err(|_| TlsError::CertInvalid)?; if certs.is_empty() { - return Err(cert_not_found_err()); + return Err(TlsError::CertsNotFound); } Ok(certs) } -fn key_decode_err() -> AnyError { - custom_error("InvalidData", "Unable to decode key") -} - -fn key_not_found_err() -> AnyError { - custom_error("InvalidData", "No keys found in key data") -} - -fn cert_not_found_err() -> AnyError { - custom_error("InvalidData", "No certificates found in certificate data") -} - /// Starts with -----BEGIN RSA PRIVATE KEY----- fn load_rsa_keys( mut bytes: &[u8], -) -> Result>, AnyError> { +) -> Result>, TlsError> { let keys: Result, _> = rsa_private_keys(&mut bytes).collect(); - let keys = keys.map_err(|_| key_decode_err())?; + let keys = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys.into_iter().map(PrivateKeyDer::Pkcs1).collect()) } /// Starts with -----BEGIN EC PRIVATE KEY----- fn load_ec_keys( mut bytes: 
&[u8], -) -> Result>, AnyError> { +) -> Result>, TlsError> { let keys: Result, std::io::Error> = ec_private_keys(&mut bytes).collect(); - let keys2 = keys.map_err(|_| key_decode_err())?; + let keys2 = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys2.into_iter().map(PrivateKeyDer::Sec1).collect()) } /// Starts with -----BEGIN PRIVATE KEY----- fn load_pkcs8_keys( mut bytes: &[u8], -) -> Result>, AnyError> { +) -> Result>, TlsError> { let keys: Result, std::io::Error> = pkcs8_private_keys(&mut bytes).collect(); - let keys2 = keys.map_err(|_| key_decode_err())?; + let keys2 = keys.map_err(|_| TlsError::KeyDecode)?; Ok(keys2.into_iter().map(PrivateKeyDer::Pkcs8).collect()) } fn filter_invalid_encoding_err( - to_be_filtered: Result, -) -> Result { + to_be_filtered: Result, +) -> Result { match to_be_filtered { - Err(Error::InvalidCertificate(rustls::CertificateError::BadEncoding)) => { - Ok(HandshakeSignatureValid::assertion()) - } + Err(rustls::Error::InvalidCertificate( + rustls::CertificateError::BadEncoding, + )) => Ok(HandshakeSignatureValid::assertion()), res => res, } } pub fn load_private_keys( bytes: &[u8], -) -> Result>, AnyError> { +) -> Result>, TlsError> { let mut keys = load_rsa_keys(bytes)?; if keys.is_empty() { @@ -334,7 +333,7 @@ pub fn load_private_keys( } if keys.is_empty() { - return Err(key_not_found_err()); + return Err(TlsError::KeysNotFound); } Ok(keys) diff --git a/ext/tls/tls_key.rs b/ext/tls/tls_key.rs index 66fac86f87..b7baa604b9 100644 --- a/ext/tls/tls_key.rs +++ b/ext/tls/tls_key.rs @@ -11,8 +11,6 @@ //! key lookup can handle closing one end of the pair, in which case they will just //! attempt to clean up the associated resources. -use deno_core::anyhow::anyhow; -use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::Either; use deno_core::futures::FutureExt; @@ -33,7 +31,19 @@ use tokio::sync::oneshot; use webpki::types::CertificateDer; use webpki::types::PrivateKeyDer; -type ErrorType = Rc; +#[derive(Debug, thiserror::Error)] +pub enum TlsKeyError { + #[error(transparent)] + Rustls(#[from] rustls::Error), + #[error("Failed: {0}")] + Failed(ErrorType), + #[error(transparent)] + JoinError(#[from] tokio::task::JoinError), + #[error(transparent)] + RecvError(#[from] tokio::sync::broadcast::error::RecvError), +} + +type ErrorType = Arc>; /// A TLS certificate/private key pair. /// see https://docs.rs/rustls-pki-types/latest/rustls_pki_types/#cloning-private-keys @@ -114,7 +124,7 @@ impl TlsKeyResolver { &self, sni: String, alpn: Vec>, - ) -> Result, AnyError> { + ) -> Result, TlsKeyError> { let key = self.resolve(sni).await?; let mut tls_config = ServerConfig::builder() @@ -183,7 +193,7 @@ impl TlsKeyResolver { pub fn resolve( &self, sni: String, - ) -> impl Future> { + ) -> impl Future> { let mut cache = self.inner.cache.borrow_mut(); let mut recv = match cache.get(&sni) { None => { @@ -194,7 +204,7 @@ impl TlsKeyResolver { } Some(TlsKeyState::Resolving(recv)) => recv.resubscribe(), Some(TlsKeyState::Resolved(res)) => { - return Either::Left(ready(res.clone().map_err(|_| anyhow!("Failed")))); + return Either::Left(ready(res.clone().map_err(TlsKeyError::Failed))); } }; drop(cache); @@ -212,7 +222,7 @@ impl TlsKeyResolver { // Someone beat us to it } } - res.map_err(|_| anyhow!("Failed")) + res.map_err(TlsKeyError::Failed) }); Either::Right(async move { handle.await? }) } @@ -247,13 +257,13 @@ impl TlsKeyLookup { } /// Resolve a previously polled item. 
- pub fn resolve(&self, sni: String, res: Result) { + pub fn resolve(&self, sni: String, res: Result) { _ = self .pending .borrow_mut() .remove(&sni) .unwrap() - .send(res.map_err(Rc::new)); + .send(res.map_err(|e| Arc::new(e.into_boxed_str()))); } } diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index 9e320040b9..b8043516b0 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -349,6 +349,7 @@ pub fn create_ws_client_config( TlsKeys::Null, socket_use, ) + .map_err(|e| e.into()) } /// Headers common to both http/1.1 and h2 requests. diff --git a/runtime/errors.rs b/runtime/errors.rs index 51d6d96fa3..4c6aeab98a 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -17,6 +17,7 @@ use deno_core::serde_json; use deno_core::url; use deno_core::ModuleResolutionError; use deno_cron::CronError; +use deno_tls::TlsError; use std::env; use std::error::Error; use std::io; @@ -157,6 +158,17 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_tls_error_class(e: &TlsError) -> &'static str { + match e { + TlsError::Rustls(_) => "Error", + TlsError::UnableAddPemFileToCert(e) => get_io_error_class(e), + TlsError::CertInvalid + | TlsError::CertsNotFound + | TlsError::KeysNotFound + | TlsError::KeyDecode => "InvalidData", + } +} + pub fn get_cron_error_class(e: &CronError) -> &'static str { match e { CronError::Resource(e) => { @@ -211,6 +223,7 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::().map(get_tls_error_class)) .or_else(|| e.downcast_ref::().map(get_cron_error_class)) .or_else(|| e.downcast_ref::().map(get_canvas_error)) .or_else(|| e.downcast_ref::().map(get_cache_error)) From 7c3da2ec1c12c8c16b04d89b7aec5e04cd2ff3be Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 14 Oct 2024 11:10:27 +0530 Subject: [PATCH 08/52] fix(ext/webgpu): allow GL backend on Windows (#26206) It should be supported according to [this](https://github.com/gfx-rs/wgpu?tab=readme-ov-file#supported-platforms). 
Fixes https://github.com/denoland/deno/issues/26144 --- ext/webgpu/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/webgpu/lib.rs b/ext/webgpu/lib.rs index df2ab323af..9cf4ca914b 100644 --- a/ext/webgpu/lib.rs +++ b/ext/webgpu/lib.rs @@ -44,7 +44,7 @@ mod macros { #[cfg(all(not(target_arch = "wasm32"), windows))] wgpu_types::Backend::Dx12 => $($c)*.$method:: $params, #[cfg(any( - all(unix, not(target_os = "macos"), not(target_os = "ios")), + all(not(target_os = "macos"), not(target_os = "ios")), feature = "angle", target_arch = "wasm32" ))] From d22195e7416e7923e2868e3f250abb457f115fc6 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 14 Oct 2024 12:41:34 +0530 Subject: [PATCH 09/52] fix(ext/napi): pass user context to napi_threadsafe_fn finalizers (#26229) Fixes https://github.com/denoland/deno/issues/26228 --- cli/napi/node_api.rs | 2 +- tests/napi/src/tsfn.rs | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/cli/napi/node_api.rs b/cli/napi/node_api.rs index 2efb71c267..4497a4695a 100644 --- a/cli/napi/node_api.rs +++ b/cli/napi/node_api.rs @@ -692,7 +692,7 @@ impl Drop for TsFn { if let Some(finalizer) = self.thread_finalize_cb { unsafe { - (finalizer)(self.env as _, self.thread_finalize_data, ptr::null_mut()); + (finalizer)(self.env as _, self.thread_finalize_data, self.context); } } } diff --git a/tests/napi/src/tsfn.rs b/tests/napi/src/tsfn.rs index dabc96f835..a3a231cec1 100644 --- a/tests/napi/src/tsfn.rs +++ b/tests/napi/src/tsfn.rs @@ -46,6 +46,7 @@ fn create_custom_gc(env: sys::napi_env) { "Create async resource string in napi_register_module_v1 napi_register_module_v1" ); let mut custom_gc_tsfn = ptr::null_mut(); + let context = Box::into_raw(Box::new(0)) as *mut c_void; check_status_or_panic!( unsafe { sys::napi_create_threadsafe_function( @@ -57,7 +58,7 @@ fn create_custom_gc(env: sys::napi_env) { 1, ptr::null_mut(), Some(custom_gc_finalize), - ptr::null_mut(), + context, Some(custom_gc), &mut custom_gc_tsfn, ) @@ -80,8 +81,9 @@ unsafe extern "C" fn empty( unsafe extern "C" fn custom_gc_finalize( _env: sys::napi_env, _finalize_data: *mut c_void, - _finalize_hint: *mut c_void, + finalize_hint: *mut c_void, ) { + let _ = Box::from_raw(finalize_hint as *mut i32); } extern "C" fn custom_gc( From 68b388a93a3efe443fc5e306e883847bfb8551db Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 14 Oct 2024 14:00:02 +0530 Subject: [PATCH 10/52] fix(ext/node): allow writing to tty columns (#26201) Behave similar to Node.js where modifying `stdout.columns` doesn't really resize the terminal. Ref https://github.com/nodejs/node/issues/17529 Fixes https://github.com/denoland/deno/issues/26196 --- ext/node/polyfills/_process/streams.mjs | 9 +++++++-- tests/unit_node/process_test.ts | 5 +++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/ext/node/polyfills/_process/streams.mjs b/ext/node/polyfills/_process/streams.mjs index 7936e82aa9..3573956c9d 100644 --- a/ext/node/polyfills/_process/streams.mjs +++ b/ext/node/polyfills/_process/streams.mjs @@ -66,14 +66,19 @@ export function createWritableStdioStream(writer, name, warmup = false) { // We cannot call `writer?.isTerminal()` eagerly here let getIsTTY = () => writer?.isTerminal(); + const getColumns = () => + stream._columns || + (writer?.isTerminal() ? Deno.consoleSize?.().columns : undefined); ObjectDefineProperties(stream, { columns: { __proto__: null, enumerable: true, configurable: true, - get: () => - writer?.isTerminal() ? 
Deno.consoleSize?.().columns : undefined, + get: () => getColumns(), + set: (value) => { + stream._columns = value; + }, }, rows: { __proto__: null, diff --git a/tests/unit_node/process_test.ts b/tests/unit_node/process_test.ts index 0e0f169d69..9506fb6091 100644 --- a/tests/unit_node/process_test.ts +++ b/tests/unit_node/process_test.ts @@ -1175,3 +1175,8 @@ Deno.test("process.cpuUsage()", () => { assert(typeof cpuUsage.user === "number"); assert(typeof cpuUsage.system === "number"); }); + +Deno.test("process.stdout.columns writable", () => { + process.stdout.columns = 80; + assertEquals(process.stdout.columns, 80); +}); From bbad7c592282dace88c77b0e089d53cb32878673 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 14 Oct 2024 14:24:26 +0530 Subject: [PATCH 11/52] fix(ext/node): compute pem length (upper bound) for key exports (#26231) Fixes https://github.com/denoland/deno/issues/26188 --- ext/node/ops/crypto/keys.rs | 8 ++++++-- tests/unit_node/crypto/crypto_key_test.ts | 21 +++++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs index 867b34e044..ac62f5ccae 100644 --- a/ext/node/ops/crypto/keys.rs +++ b/ext/node/ops/crypto/keys.rs @@ -2024,7 +2024,9 @@ pub fn op_node_export_public_key_pem( _ => unreachable!("export_der would have errored"), }; - let mut out = vec![0; 2048]; + let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len()) + .map_err(|_| type_error("very large data"))?; + let mut out = vec![0; pem_len]; let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?; writer.write(&data)?; let len = writer.finish()?; @@ -2063,7 +2065,9 @@ pub fn op_node_export_private_key_pem( _ => unreachable!("export_der would have errored"), }; - let mut out = vec![0; 2048]; + let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len()) + .map_err(|_| type_error("very large data"))?; + let mut out = vec![0; pem_len]; let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?; writer.write(&data)?; let len = writer.finish()?; diff --git a/tests/unit_node/crypto/crypto_key_test.ts b/tests/unit_node/crypto/crypto_key_test.ts index 7995ce5d3f..3c7ad44232 100644 --- a/tests/unit_node/crypto/crypto_key_test.ts +++ b/tests/unit_node/crypto/crypto_key_test.ts @@ -656,3 +656,24 @@ z6TExWlQMjt66nV7R8cRAkzmABrG+NW3e8Zpac7Lkuv+zu0S+K7c assertEquals(publicKey.type, "public"); assertEquals(publicKey.asymmetricKeyType, "rsa"); }); + +// https://github.com/denoland/deno/issues/26188 +Deno.test("generateKeyPair large pem", function () { + const passphrase = "mypassphrase"; + const cipher = "aes-256-cbc"; + const modulusLength = 4096; + + generateKeyPairSync("rsa", { + modulusLength, + publicKeyEncoding: { + type: "spki", + format: "pem", + }, + privateKeyEncoding: { + type: "pkcs8", + format: "pem", + cipher, + passphrase, + }, + }); +}); From 3eda179220370e9c9093427fb694eed746548008 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Mon, 14 Oct 2024 13:29:50 +0100 Subject: [PATCH 12/52] feat(cli): improve deno info output for npm packages (#25906) --- cli/tools/info.rs | 69 +++++++-------- .../cjs_with_deps/main_info_json.out | 52 ++++++++--- .../cjs_with_deps/main_info_json.out | 52 ++++++++--- .../info_cli_chalk_json/info/chalk_json.out | 18 ++-- .../main_info_json.out | 30 +++++-- tests/specs/npm/npmrc/__test__.jsonc | 4 + tests/specs/npm/npmrc/info.out | 87 +++++++++++++++++++ 7 files changed, 231 insertions(+), 81 deletions(-) create mode 100644 
tests/specs/npm/npmrc/info.out diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 1c83abe3bb..7f8d68ae1c 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -17,6 +17,7 @@ use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; use deno_graph::Resolution; +use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -47,6 +48,7 @@ pub async fn info( let module_graph_creator = factory.module_graph_creator().await?; let npm_resolver = factory.npm_resolver().await?; let maybe_lockfile = cli_options.maybe_lockfile(); + let npmrc = cli_options.npmrc(); let resolver = factory.workspace_resolver().await?; let maybe_import_specifier = @@ -88,7 +90,8 @@ pub async fn info( JSON_SCHEMA_VERSION.into(), ); } - add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref()); + + add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref(), npmrc); display::write_json_to_stdout(&json_graph)?; } else { let mut output = String::new(); @@ -185,6 +188,7 @@ fn print_cache_info( fn add_npm_packages_to_json( json: &mut serde_json::Value, npm_resolver: &dyn CliNpmResolver, + npmrc: &ResolvedNpmRc, ) { let Some(npm_resolver) = npm_resolver.as_managed() else { return; // does not include byonm to deno info's output @@ -195,45 +199,28 @@ fn add_npm_packages_to_json( let json = json.as_object_mut().unwrap(); let modules = json.get_mut("modules").and_then(|m| m.as_array_mut()); if let Some(modules) = modules { - if modules.len() == 1 - && modules[0].get("kind").and_then(|k| k.as_str()) == Some("npm") - { - // If there is only one module and it's "external", then that means - // someone provided an npm specifier as a cli argument. In this case, - // we want to show which npm package the cli argument resolved to. - let module = &mut modules[0]; - let maybe_package = module - .get("specifier") - .and_then(|k| k.as_str()) - .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok()) - .and_then(|package_ref| { - snapshot - .resolve_package_from_deno_module(package_ref.nv()) - .ok() - }); - if let Some(pkg) = maybe_package { - if let Some(module) = module.as_object_mut() { - module - .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); - } - } - } else { - // Filter out npm package references from the modules and instead - // have them only listed as dependencies. This is done because various - // npm specifiers modules in the graph are really just unresolved - // references. So there could be listed multiple npm specifiers - // that would resolve to a single npm package. - for i in (0..modules.len()).rev() { - if matches!( - modules[i].get("kind").and_then(|k| k.as_str()), - Some("npm") | Some("external") - ) { - modules.remove(i); - } - } - } - for module in modules.iter_mut() { + if matches!(module.get("kind").and_then(|k| k.as_str()), Some("npm")) { + // If there is only one module and it's "external", then that means + // someone provided an npm specifier as a cli argument. In this case, + // we want to show which npm package the cli argument resolved to. 
+ let maybe_package = module + .get("specifier") + .and_then(|k| k.as_str()) + .and_then(|specifier| NpmPackageNvReference::from_str(specifier).ok()) + .and_then(|package_ref| { + snapshot + .resolve_package_from_deno_module(package_ref.nv()) + .ok() + }); + if let Some(pkg) = maybe_package { + if let Some(module) = module.as_object_mut() { + module + .insert("npmPackage".to_string(), pkg.id.as_serialized().into()); + } + } + } + let dependencies = module .get_mut("dependencies") .and_then(|d| d.as_array_mut()); @@ -265,7 +252,7 @@ fn add_npm_packages_to_json( let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len()); for pkg in sorted_packages { let mut kv = serde_json::Map::new(); - kv.insert("name".to_string(), pkg.id.nv.name.to_string().into()); + kv.insert("name".to_string(), pkg.id.nv.name.clone().into()); kv.insert("version".to_string(), pkg.id.nv.version.to_string().into()); let mut deps = pkg.dependencies.values().collect::>(); deps.sort(); @@ -274,6 +261,8 @@ fn add_npm_packages_to_json( .map(|id| serde_json::Value::String(id.as_serialized())) .collect::>(); kv.insert("dependencies".to_string(), deps.into()); + let registry_url = npmrc.get_registry_url(&pkg.id.nv.name); + kv.insert("registryUrl".to_string(), registry_url.to_string().into()); json_packages.insert(pkg.id.as_serialized(), kv.into()); } diff --git a/tests/specs/npm/info_chalk_json/cjs_with_deps/main_info_json.out b/tests/specs/npm/info_chalk_json/cjs_with_deps/main_info_json.out index 137b9f8ce5..2f7dde2d98 100644 --- a/tests/specs/npm/info_chalk_json/cjs_with_deps/main_info_json.out +++ b/tests/specs/npm/info_chalk_json/cjs_with_deps/main_info_json.out @@ -46,6 +46,16 @@ "size": 325, "mediaType": "JavaScript", "specifier": "[WILDCARD]/main.js" + }, + { + "kind": "npm", + "specifier": "npm:/chai@4.3.6", + "npmPackage": "chai@4.3.6" + }, + { + "kind": "npm", + "specifier": "npm:/chalk@4.1.2", + "npmPackage": "chalk@4.1.2" } ], "redirects": { @@ -58,12 +68,14 @@ "version": "4.3.0", "dependencies": [ "color-convert@2.0.1" - ] + ], + "registryUrl": "http://localhost:4260/" }, "assertion-error@1.1.0": { "name": "assertion-error", "version": "1.1.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "chai@4.3.6": { "name": "chai", @@ -76,7 +88,8 @@ "loupe@2.3.4", "pathval@1.1.1", "type-detect@4.0.8" - ] + ], + "registryUrl": "http://localhost:4260/" }, "chalk@4.1.2": { "name": "chalk", @@ -84,65 +97,76 @@ "dependencies": [ "ansi-styles@4.3.0", "supports-color@7.2.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "check-error@1.0.2": { "name": "check-error", "version": "1.0.2", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "color-convert@2.0.1": { "name": "color-convert", "version": "2.0.1", "dependencies": [ "color-name@1.1.4" - ] + ], + "registryUrl": "http://localhost:4260/" }, "color-name@1.1.4": { "name": "color-name", "version": "1.1.4", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "deep-eql@3.0.1": { "name": "deep-eql", "version": "3.0.1", "dependencies": [ "type-detect@4.0.8" - ] + ], + "registryUrl": "http://localhost:4260/" }, "get-func-name@2.0.0": { "name": "get-func-name", "version": "2.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "has-flag@4.0.0": { "name": "has-flag", "version": "4.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "loupe@2.3.4": { "name": "loupe", 
"version": "2.3.4", "dependencies": [ "get-func-name@2.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "pathval@1.1.1": { "name": "pathval", "version": "1.1.1", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "supports-color@7.2.0": { "name": "supports-color", "version": "7.2.0", "dependencies": [ "has-flag@4.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "type-detect@4.0.8": { "name": "type-detect", "version": "4.0.8", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" } } } diff --git a/tests/specs/npm/info_chalk_json_node_modules_dir/cjs_with_deps/main_info_json.out b/tests/specs/npm/info_chalk_json_node_modules_dir/cjs_with_deps/main_info_json.out index 137b9f8ce5..2f7dde2d98 100644 --- a/tests/specs/npm/info_chalk_json_node_modules_dir/cjs_with_deps/main_info_json.out +++ b/tests/specs/npm/info_chalk_json_node_modules_dir/cjs_with_deps/main_info_json.out @@ -46,6 +46,16 @@ "size": 325, "mediaType": "JavaScript", "specifier": "[WILDCARD]/main.js" + }, + { + "kind": "npm", + "specifier": "npm:/chai@4.3.6", + "npmPackage": "chai@4.3.6" + }, + { + "kind": "npm", + "specifier": "npm:/chalk@4.1.2", + "npmPackage": "chalk@4.1.2" } ], "redirects": { @@ -58,12 +68,14 @@ "version": "4.3.0", "dependencies": [ "color-convert@2.0.1" - ] + ], + "registryUrl": "http://localhost:4260/" }, "assertion-error@1.1.0": { "name": "assertion-error", "version": "1.1.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "chai@4.3.6": { "name": "chai", @@ -76,7 +88,8 @@ "loupe@2.3.4", "pathval@1.1.1", "type-detect@4.0.8" - ] + ], + "registryUrl": "http://localhost:4260/" }, "chalk@4.1.2": { "name": "chalk", @@ -84,65 +97,76 @@ "dependencies": [ "ansi-styles@4.3.0", "supports-color@7.2.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "check-error@1.0.2": { "name": "check-error", "version": "1.0.2", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "color-convert@2.0.1": { "name": "color-convert", "version": "2.0.1", "dependencies": [ "color-name@1.1.4" - ] + ], + "registryUrl": "http://localhost:4260/" }, "color-name@1.1.4": { "name": "color-name", "version": "1.1.4", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "deep-eql@3.0.1": { "name": "deep-eql", "version": "3.0.1", "dependencies": [ "type-detect@4.0.8" - ] + ], + "registryUrl": "http://localhost:4260/" }, "get-func-name@2.0.0": { "name": "get-func-name", "version": "2.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "has-flag@4.0.0": { "name": "has-flag", "version": "4.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "loupe@2.3.4": { "name": "loupe", "version": "2.3.4", "dependencies": [ "get-func-name@2.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "pathval@1.1.1": { "name": "pathval", "version": "1.1.1", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "supports-color@7.2.0": { "name": "supports-color", "version": "7.2.0", "dependencies": [ "has-flag@4.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "type-detect@4.0.8": { "name": "type-detect", "version": "4.0.8", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" } } } diff --git a/tests/specs/npm/info_cli_chalk_json/info/chalk_json.out b/tests/specs/npm/info_cli_chalk_json/info/chalk_json.out index 
21fc7edf1a..fd2aa9ab0d 100644 --- a/tests/specs/npm/info_cli_chalk_json/info/chalk_json.out +++ b/tests/specs/npm/info_cli_chalk_json/info/chalk_json.out @@ -19,7 +19,8 @@ "version": "4.3.0", "dependencies": [ "color-convert@2.0.1" - ] + ], + "registryUrl": "http://localhost:4260/" }, "chalk@4.1.2": { "name": "chalk", @@ -27,31 +28,36 @@ "dependencies": [ "ansi-styles@4.3.0", "supports-color@7.2.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "color-convert@2.0.1": { "name": "color-convert", "version": "2.0.1", "dependencies": [ "color-name@1.1.4" - ] + ], + "registryUrl": "http://localhost:4260/" }, "color-name@1.1.4": { "name": "color-name", "version": "1.1.4", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "has-flag@4.0.0": { "name": "has-flag", "version": "4.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "supports-color@7.2.0": { "name": "supports-color", "version": "7.2.0", "dependencies": [ "has-flag@4.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" } } } diff --git a/tests/specs/npm/info_peer_deps_json/peer_deps_with_copied_folders/main_info_json.out b/tests/specs/npm/info_peer_deps_json/peer_deps_with_copied_folders/main_info_json.out index 46cc35c650..2ee6268f55 100644 --- a/tests/specs/npm/info_peer_deps_json/peer_deps_with_copied_folders/main_info_json.out +++ b/tests/specs/npm/info_peer_deps_json/peer_deps_with_copied_folders/main_info_json.out @@ -1,7 +1,7 @@ { "version": 1, "roots": [ - "[WILDCARD]/peer_deps_with_copied_folders/main.ts" + "file://[WILDCARD]/main.ts" ], "modules": [ { @@ -46,6 +46,16 @@ "size": 171, "mediaType": "TypeScript", "specifier": "file://[WILDCARD]/main.ts" + }, + { + "kind": "npm", + "specifier": "npm:/@denotest/peer-dep-test-child@1.0.0", + "npmPackage": "@denotest/peer-dep-test-child@1.0.0_@denotest+peer-dep-test-peer@1.0.0" + }, + { + "kind": "npm", + "specifier": "npm:/@denotest/peer-dep-test-child@2.0.0", + "npmPackage": "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0" } ], "redirects": { @@ -59,7 +69,8 @@ "dependencies": [ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0", "@denotest/peer-dep-test-peer@1.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0": { "name": "@denotest/peer-dep-test-child", @@ -67,31 +78,36 @@ "dependencies": [ "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0", "@denotest/peer-dep-test-peer@2.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0": { "name": "@denotest/peer-dep-test-grandchild", "version": "1.0.0", "dependencies": [ "@denotest/peer-dep-test-peer@1.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0": { "name": "@denotest/peer-dep-test-grandchild", "version": "1.0.0", "dependencies": [ "@denotest/peer-dep-test-peer@2.0.0" - ] + ], + "registryUrl": "http://localhost:4260/" }, "@denotest/peer-dep-test-peer@1.0.0": { "name": "@denotest/peer-dep-test-peer", "version": "1.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" }, "@denotest/peer-dep-test-peer@2.0.0": { "name": "@denotest/peer-dep-test-peer", "version": "2.0.0", - "dependencies": [] + "dependencies": [], + "registryUrl": "http://localhost:4260/" } } } diff --git 
a/tests/specs/npm/npmrc/__test__.jsonc b/tests/specs/npm/npmrc/__test__.jsonc index e7588a7797..88ef2d8102 100644 --- a/tests/specs/npm/npmrc/__test__.jsonc +++ b/tests/specs/npm/npmrc/__test__.jsonc @@ -13,6 +13,10 @@ "run_node_modules_dir": { "args": "run --node-modules-dir=auto -A --quiet main.js", "output": "main.out" + }, + "info": { + "args": "info --node-modules-dir=auto --json main.js", + "output": "info.out" } } } diff --git a/tests/specs/npm/npmrc/info.out b/tests/specs/npm/npmrc/info.out new file mode 100644 index 0000000000..8f82b10c9a --- /dev/null +++ b/tests/specs/npm/npmrc/info.out @@ -0,0 +1,87 @@ +[UNORDERED_START] +Download http://localhost:4262/@denotest2/basic +Download http://localhost:4261/@denotest/basic +Download http://localhost:4261/@denotest/basic/1.0.0.tgz +Download http://localhost:4262/@denotest2/basic/1.0.0.tgz +[UNORDERED_END] +[UNORDERED_START] +Initialize @denotest/basic@1.0.0 +Initialize @denotest2/basic@1.0.0 +[UNORDERED_END] +{ + "version": 1, + "roots": [ + "file://[WILDCARD]/main.js" + ], + "modules": [ + { + "kind": "esm", + "dependencies": [ + { + "specifier": "@denotest/basic", + "code": { + "specifier": "npm:@denotest/basic@1.0.0", + "span": { + "start": { + "line": 0, + "character": 35 + }, + "end": { + "line": 0, + "character": 52 + } + } + } + }, + { + "specifier": "@denotest2/basic", + "code": { + "specifier": "npm:@denotest2/basic@1.0.0", + "span": { + "start": { + "line": 1, + "character": 22 + }, + "end": { + "line": 1, + "character": 40 + } + } + } + } + ], + "local": "[WILDCARD]main.js", + "size": 192, + "mediaType": "JavaScript", + "specifier": "file://[WILDCARD]/main.js" + }, + { + "kind": "npm", + "specifier": "npm:/@denotest/basic@1.0.0", + "npmPackage": "@denotest/basic@1.0.0" + }, + { + "kind": "npm", + "specifier": "npm:/@denotest2/basic@1.0.0", + "npmPackage": "@denotest2/basic@1.0.0" + } + ], + "redirects": { + "npm:@denotest/basic@1.0.0": "npm:/@denotest/basic@1.0.0", + "npm:@denotest2/basic@1.0.0": "npm:/@denotest2/basic@1.0.0" + }, + "npmPackages": { + "@denotest/basic@1.0.0": { + "name": "@denotest/basic", + "version": "1.0.0", + "dependencies": [], + "registryUrl": "http://localhost:4261/" + }, + "@denotest2/basic@1.0.0": { + "name": "@denotest2/basic", + "version": "1.0.0", + "dependencies": [], + "registryUrl": "http://localhost:4262/" + } + } +} From dfbf03eee798da4f42a1bffeebd8edd1d0095406 Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Mon, 14 Oct 2024 18:01:51 +0530 Subject: [PATCH 13/52] perf: use fast calls for microtask ops (#26236) Updates deno_core to 0.312.0 --- Cargo.lock | 12 ++++++------ Cargo.toml | 2 +- cli/util/fs.rs | 2 +- ext/fs/std_fs.rs | 2 +- ext/node/ops/require.rs | 2 +- ext/node/ops/worker_threads.rs | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c6297d0b56..9625365e6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1422,9 +1422,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.311.0" +version = "0.312.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e09bd55da542fa1fde753aff617c355b5d782e763ab2a19e4371a56d7844cac" +checksum = "ed5b20008c84715322782af2f94ff7ebc34eb50294ae098daf36b63a5b9bdd24" dependencies = [ "anyhow", "bincode", @@ -1920,9 +1920,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.187.0" +version = "0.188.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e040fd4def8a67538fe38c9955fd970efc9f44284bd69d44f8992a456afd665d" +checksum = 
"ba1c7a8d9169e23de729000d6903dd0dee39e521e226d1ffbb9c5336665afae6" dependencies = [ "proc-macro-rules", "proc-macro2", @@ -6171,9 +6171,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.220.0" +version = "0.221.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e7a65d91d79acc82aa229aeb084f4a39bda269069bc1520df40f679495388e4" +checksum = "3ca142cf34468e22063560302e6cc5e45f48e95a0dd25b3e22d6c32c32251135" dependencies = [ "num-bigint", "serde", diff --git a/Cargo.toml b/Cargo.toml index 1128c64e34..a4ec99953b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.42.2", features = ["transpiling"] } -deno_core = { version = "0.311.0" } +deno_core = { version = "0.312.0" } deno_bench_util = { version = "0.165.0", path = "./bench_util" } deno_lockfile = "=0.23.1" diff --git a/cli/util/fs.rs b/cli/util/fs.rs index a021ec19c0..2ad3affc8d 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -277,7 +277,7 @@ pub fn write_file_2>( /// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. pub fn canonicalize_path(path: &Path) -> Result { - Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) + Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?)) } /// Canonicalizes a path which might be non-existent by going up the diff --git a/ext/fs/std_fs.rs b/ext/fs/std_fs.rs index 41a8569ba8..1a83c97c53 100644 --- a/ext/fs/std_fs.rs +++ b/ext/fs/std_fs.rs @@ -929,7 +929,7 @@ fn exists(path: &Path) -> bool { } fn realpath(path: &Path) -> FsResult { - Ok(deno_core::strip_unc_prefix(path.canonicalize()?)) + Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?)) } fn read_dir(path: &Path) -> FsResult> { diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 5473369815..7d85ee8532 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -295,7 +295,7 @@ where let path = ensure_read_permission::
<P>
(state, &path)?; let fs = state.borrow::(); let canonicalized_path = - deno_core::strip_unc_prefix(fs.realpath_sync(&path)?); + deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); Ok(canonicalized_path.to_string_lossy().into_owned()) } diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index 4c50092f28..5f139c5dc8 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -52,7 +52,7 @@ where let path = ensure_read_permission::
<P>
(state, &path)?; let fs = state.borrow::(); let canonicalized_path = - deno_core::strip_unc_prefix(fs.realpath_sync(&path)?); + deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); Url::from_file_path(canonicalized_path) .map_err(|e| generic_error(format!("URL from Path-String: {:#?}", e)))? }; From f3530c858f873b0f4561a52fa92ddd1d099612b3 Mon Sep 17 00:00:00 2001 From: Keith Maxwell Date: Mon, 14 Oct 2024 13:46:26 +0100 Subject: [PATCH 14/52] chore: generate the checksums from the final archives (#26151) Fixes https://github.com/denoland/deno/issues/7253 --- .github/workflows/ci.generate.ts | 32 ++++++++++++++++---------------- .github/workflows/ci.yml | 32 ++++++++++++++++---------------- 2 files changed, 32 insertions(+), 32 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index c03f10bc92..9abedad9c0 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -751,11 +751,11 @@ const ci = { ].join("\n"), run: [ "cd target/release", - "shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum", "zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno", + "shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum", "strip denort", - "shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum", "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort", + "shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum", "./deno types > lib.deno.d.ts", ].join("\n"), }, @@ -779,11 +779,11 @@ const ci = { "--p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) " + "--entitlements-xml-file=cli/entitlements.plist", "cd target/release", - "shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum", "zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno", + "shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum", "strip denort", - "shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum", "zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort", + "shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum", ] .join("\n"), }, @@ -797,10 +797,10 @@ const ci = { ].join("\n"), shell: "pwsh", run: [ - "Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum", "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip", - "Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum", + "Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum", "Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip", + "Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum", ].join("\n"), }, { @@ -1045,25 +1045,25 @@ const ci = { with: { files: [ "target/release/deno-x86_64-pc-windows-msvc.zip", - "target/release/deno-x86_64-pc-windows-msvc.sha256sum", + 
"target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum", "target/release/denort-x86_64-pc-windows-msvc.zip", - "target/release/denort-x86_64-pc-windows-msvc.sha256sum", + "target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum", "target/release/deno-x86_64-unknown-linux-gnu.zip", - "target/release/deno-x86_64-unknown-linux-gnu.sha256sum", + "target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum", "target/release/denort-x86_64-unknown-linux-gnu.zip", - "target/release/denort-x86_64-unknown-linux-gnu.sha256sum", + "target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum", "target/release/deno-x86_64-apple-darwin.zip", - "target/release/deno-x86_64-apple-darwin.sha256sum", + "target/release/deno-x86_64-apple-darwin.zip.sha256sum", "target/release/denort-x86_64-apple-darwin.zip", - "target/release/denort-x86_64-apple-darwin.sha256sum", + "target/release/denort-x86_64-apple-darwin.zip.sha256sum", "target/release/deno-aarch64-unknown-linux-gnu.zip", - "target/release/deno-aarch64-unknown-linux-gnu.sha256sum", + "target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum", "target/release/denort-aarch64-unknown-linux-gnu.zip", - "target/release/denort-aarch64-unknown-linux-gnu.sha256sum", + "target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum", "target/release/deno-aarch64-apple-darwin.zip", - "target/release/deno-aarch64-apple-darwin.sha256sum", + "target/release/deno-aarch64-apple-darwin.zip.sha256sum", "target/release/denort-aarch64-apple-darwin.zip", - "target/release/denort-aarch64-apple-darwin.sha256sum", + "target/release/denort-aarch64-apple-darwin.zip.sha256sum", "target/release/deno_src.tar.gz", "target/release/lib.deno.d.ts", ].join("\n"), diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 701d67f93c..d8a0682cb0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -442,11 +442,11 @@ jobs: github.repository == 'denoland/deno') run: |- cd target/release - shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno + shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum strip denort - shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort + shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum ./deno types > lib.deno.d.ts - name: Pre-release (mac) if: |- @@ -461,11 +461,11 @@ jobs: echo "Key is $(echo $APPLE_CODESIGN_KEY | base64 -d | wc -c) bytes" rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist cd target/release - shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno + shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum strip denort - shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort + shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum - name: Pre-release (windows) if: |- !(matrix.skip) && (matrix.os == 'windows' && @@ -474,10 +474,10 @@ jobs: github.repository == 'denoland/deno') shell: pwsh run: |- - Get-FileHash 
target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip - Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum + Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip + Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum - name: Upload canary to dl.deno.land if: |- !(matrix.skip) && (matrix.job == 'test' && @@ -652,25 +652,25 @@ jobs: with: files: |- target/release/deno-x86_64-pc-windows-msvc.zip - target/release/deno-x86_64-pc-windows-msvc.sha256sum + target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum target/release/denort-x86_64-pc-windows-msvc.zip - target/release/denort-x86_64-pc-windows-msvc.sha256sum + target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum target/release/deno-x86_64-unknown-linux-gnu.zip - target/release/deno-x86_64-unknown-linux-gnu.sha256sum + target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum target/release/denort-x86_64-unknown-linux-gnu.zip - target/release/denort-x86_64-unknown-linux-gnu.sha256sum + target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum target/release/deno-x86_64-apple-darwin.zip - target/release/deno-x86_64-apple-darwin.sha256sum + target/release/deno-x86_64-apple-darwin.zip.sha256sum target/release/denort-x86_64-apple-darwin.zip - target/release/denort-x86_64-apple-darwin.sha256sum + target/release/denort-x86_64-apple-darwin.zip.sha256sum target/release/deno-aarch64-unknown-linux-gnu.zip - target/release/deno-aarch64-unknown-linux-gnu.sha256sum + target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum target/release/denort-aarch64-unknown-linux-gnu.zip - target/release/denort-aarch64-unknown-linux-gnu.sha256sum + target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum target/release/deno-aarch64-apple-darwin.zip - target/release/deno-aarch64-apple-darwin.sha256sum + target/release/deno-aarch64-apple-darwin.zip.sha256sum target/release/denort-aarch64-apple-darwin.zip - target/release/denort-aarch64-apple-darwin.sha256sum + target/release/denort-aarch64-apple-darwin.zip.sha256sum target/release/deno_src.tar.gz target/release/lib.deno.d.ts body_path: target/release/release-notes.md From c5449d71da2d623e866d733b6db180a6f94ff7c6 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 15:35:52 -0400 Subject: [PATCH 15/52] fix(install): support installing npm package with alias (#26246) Just tried this out today and it wasn't properly implemented in https://github.com/denoland/deno/pull/24156 --- cli/tools/registry/pm.rs | 12 +++++++++++- tests/specs/add/alias/__test__.jsonc | 13 +++++++++++++ tests/specs/add/alias/package.json | 4 ++++ tests/specs/add/alias/package.json.out | 5 +++++ tests/specs/add/dev/package.json.out | 4 +++- .../future_install_local_add_npm/package.json.out | 4 +++- .../specs/install/install_add_dev/package.json.out | 4 +++- .../install_deprecated_package/package.json.out | 4 
+++- .../remove/package_json/rm_add_package.json.out | 4 +++- 9 files changed, 48 insertions(+), 6 deletions(-) create mode 100644 tests/specs/add/alias/__test__.jsonc create mode 100644 tests/specs/add/alias/package.json create mode 100644 tests/specs/add/alias/package.json.out diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 5dc0426202..3276acfbf7 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -363,7 +363,14 @@ fn package_json_dependency_entry( selected: SelectedPackage, ) -> (String, String) { if let Some(npm_package) = selected.package_name.strip_prefix("npm:") { - (npm_package.into(), selected.version_req) + if selected.import_name == npm_package { + (npm_package.into(), selected.version_req) + } else { + ( + selected.import_name, + format!("npm:{}@{}", npm_package, selected.version_req), + ) + } } else if let Some(jsr_package) = selected.package_name.strip_prefix("jsr:") { let jsr_package = jsr_package.strip_prefix('@').unwrap_or(jsr_package); let scope_replaced = jsr_package.replace('/', "__"); @@ -741,6 +748,9 @@ fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String { let mut contents = vec![]; let len = packages_to_version.len(); for (index, (package, version)) in packages_to_version.iter().enumerate() { + if index == 0 { + contents.push(String::new()); // force a newline at the start + } // TODO(bartlomieju): fix it, once we start support specifying version on the cli contents.push(format!("\"{}\": \"{}\"", package, version)); if index != len - 1 { diff --git a/tests/specs/add/alias/__test__.jsonc b/tests/specs/add/alias/__test__.jsonc new file mode 100644 index 0000000000..8a16ad3b46 --- /dev/null +++ b/tests/specs/add/alias/__test__.jsonc @@ -0,0 +1,13 @@ +{ + "tempDir": true, + "steps": [{ + "args": "install my-alias@npm:@denotest/add", + "output": "[WILDCARD]" + }, { + "args": [ + "eval", + "console.log(Deno.readTextFileSync('package.json').trim())" + ], + "output": "package.json.out" + }] +} diff --git a/tests/specs/add/alias/package.json b/tests/specs/add/alias/package.json new file mode 100644 index 0000000000..9664f260a7 --- /dev/null +++ b/tests/specs/add/alias/package.json @@ -0,0 +1,4 @@ +{ + "dependencies": { + } +} diff --git a/tests/specs/add/alias/package.json.out b/tests/specs/add/alias/package.json.out new file mode 100644 index 0000000000..b6326e8bfb --- /dev/null +++ b/tests/specs/add/alias/package.json.out @@ -0,0 +1,5 @@ +{ + "dependencies": { + "my-alias": "npm:@denotest/add@^1.0.0" + } +} diff --git a/tests/specs/add/dev/package.json.out b/tests/specs/add/dev/package.json.out index d5ca56e004..866724397f 100644 --- a/tests/specs/add/dev/package.json.out +++ b/tests/specs/add/dev/package.json.out @@ -1,3 +1,5 @@ { - "devDependencies": { "@denotest/esm-basic": "^1.0.0" } + "devDependencies": { + "@denotest/esm-basic": "^1.0.0" + } } diff --git a/tests/specs/install/future_install_local_add_npm/package.json.out b/tests/specs/install/future_install_local_add_npm/package.json.out index ad8518e791..613d986360 100644 --- a/tests/specs/install/future_install_local_add_npm/package.json.out +++ b/tests/specs/install/future_install_local_add_npm/package.json.out @@ -1,3 +1,5 @@ { - "dependencies": { "@denotest/esm-basic": "^1.0.0" } + "dependencies": { + "@denotest/esm-basic": "^1.0.0" + } } diff --git a/tests/specs/install/install_add_dev/package.json.out b/tests/specs/install/install_add_dev/package.json.out index d5ca56e004..866724397f 100644 --- 
a/tests/specs/install/install_add_dev/package.json.out +++ b/tests/specs/install/install_add_dev/package.json.out @@ -1,3 +1,5 @@ { - "devDependencies": { "@denotest/esm-basic": "^1.0.0" } + "devDependencies": { + "@denotest/esm-basic": "^1.0.0" + } } diff --git a/tests/specs/install/install_deprecated_package/package.json.out b/tests/specs/install/install_deprecated_package/package.json.out index 4b4b08087a..0cf36cd24f 100644 --- a/tests/specs/install/install_deprecated_package/package.json.out +++ b/tests/specs/install/install_deprecated_package/package.json.out @@ -1,3 +1,5 @@ { - "dependencies": { "@denotest/deprecated-package": "^1.0.0" } + "dependencies": { + "@denotest/deprecated-package": "^1.0.0" + } } diff --git a/tests/specs/remove/package_json/rm_add_package.json.out b/tests/specs/remove/package_json/rm_add_package.json.out index d5ca56e004..866724397f 100644 --- a/tests/specs/remove/package_json/rm_add_package.json.out +++ b/tests/specs/remove/package_json/rm_add_package.json.out @@ -1,3 +1,5 @@ { - "devDependencies": { "@denotest/esm-basic": "^1.0.0" } + "devDependencies": { + "@denotest/esm-basic": "^1.0.0" + } } From cb385d9e4acbd81235c3784d7e56b49c3fa41dd3 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 14 Oct 2024 13:53:17 -0700 Subject: [PATCH 16/52] refactor(ext/webstorage): use concrete error types (#26173) --- Cargo.lock | 1 + ext/webstorage/Cargo.toml | 1 + ext/webstorage/lib.rs | 77 +++++++++++++------------------------- runtime/errors.rs | 15 +++++++- runtime/ops/worker_host.rs | 11 +++--- 5 files changed, 47 insertions(+), 58 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9625365e6b..a8e8608ddf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2235,6 +2235,7 @@ dependencies = [ "deno_core", "deno_web", "rusqlite", + "thiserror", ] [[package]] diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 08ed8b0f29..bf9bbf23be 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -17,3 +17,4 @@ path = "lib.rs" deno_core.workspace = true deno_web.workspace = true rusqlite.workspace = true +thiserror.workspace = true diff --git a/ext/webstorage/lib.rs b/ext/webstorage/lib.rs index 99e61a1804..40946f05a7 100644 --- a/ext/webstorage/lib.rs +++ b/ext/webstorage/lib.rs @@ -2,10 +2,8 @@ // NOTE to all: use **cached** prepared statements when interfacing with SQLite. 
-use std::fmt; use std::path::PathBuf; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use rusqlite::params; @@ -14,6 +12,18 @@ use rusqlite::OptionalExtension; pub use rusqlite; +#[derive(Debug, thiserror::Error)] +pub enum WebStorageError { + #[error("LocalStorage is not supported in this context.")] + ContextNotSupported, + #[error(transparent)] + Sqlite(#[from] rusqlite::Error), + #[error(transparent)] + Io(std::io::Error), + #[error("Exceeded maximum storage size")] + StorageExceeded, +} + #[derive(Clone)] struct OriginStorageDir(PathBuf); @@ -51,15 +61,13 @@ struct SessionStorage(Connection); fn get_webstorage( state: &mut OpState, persistent: bool, -) -> Result<&Connection, AnyError> { +) -> Result<&Connection, WebStorageError> { let conn = if persistent { if state.try_borrow::().is_none() { - let path = state.try_borrow::().ok_or_else(|| { - DomExceptionNotSupportedError::new( - "LocalStorage is not supported in this context.", - ) - })?; - std::fs::create_dir_all(&path.0)?; + let path = state + .try_borrow::() + .ok_or(WebStorageError::ContextNotSupported)?; + std::fs::create_dir_all(&path.0).map_err(WebStorageError::Io)?; let conn = Connection::open(path.0.join("local_storage"))?; // Enable write-ahead-logging and tweak some other stuff. let initial_pragmas = " @@ -106,7 +114,7 @@ fn get_webstorage( pub fn op_webstorage_length( state: &mut OpState, persistent: bool, -) -> Result { +) -> Result { let conn = get_webstorage(state, persistent)?; let mut stmt = conn.prepare_cached("SELECT COUNT(*) FROM data")?; @@ -121,7 +129,7 @@ pub fn op_webstorage_key( state: &mut OpState, #[smi] index: u32, persistent: bool, -) -> Result, AnyError> { +) -> Result, WebStorageError> { let conn = get_webstorage(state, persistent)?; let mut stmt = @@ -135,14 +143,9 @@ pub fn op_webstorage_key( } #[inline] -fn size_check(input: usize) -> Result<(), AnyError> { +fn size_check(input: usize) -> Result<(), WebStorageError> { if input >= MAX_STORAGE_BYTES { - return Err( - deno_web::DomExceptionQuotaExceededError::new( - "Exceeded maximum storage size", - ) - .into(), - ); + return Err(WebStorageError::StorageExceeded); } Ok(()) @@ -154,7 +157,7 @@ pub fn op_webstorage_set( #[string] key: &str, #[string] value: &str, persistent: bool, -) -> Result<(), AnyError> { +) -> Result<(), WebStorageError> { let conn = get_webstorage(state, persistent)?; size_check(key.len() + value.len())?; @@ -178,7 +181,7 @@ pub fn op_webstorage_get( state: &mut OpState, #[string] key_name: String, persistent: bool, -) -> Result, AnyError> { +) -> Result, WebStorageError> { let conn = get_webstorage(state, persistent)?; let mut stmt = conn.prepare_cached("SELECT value FROM data WHERE key = ?")?; @@ -194,7 +197,7 @@ pub fn op_webstorage_remove( state: &mut OpState, #[string] key_name: &str, persistent: bool, -) -> Result<(), AnyError> { +) -> Result<(), WebStorageError> { let conn = get_webstorage(state, persistent)?; let mut stmt = conn.prepare_cached("DELETE FROM data WHERE key = ?")?; @@ -207,7 +210,7 @@ pub fn op_webstorage_remove( pub fn op_webstorage_clear( state: &mut OpState, persistent: bool, -) -> Result<(), AnyError> { +) -> Result<(), WebStorageError> { let conn = get_webstorage(state, persistent)?; let mut stmt = conn.prepare_cached("DELETE FROM data")?; @@ -221,7 +224,7 @@ pub fn op_webstorage_clear( pub fn op_webstorage_iterate_keys( state: &mut OpState, persistent: bool, -) -> Result, AnyError> { +) -> Result, WebStorageError> { let conn = get_webstorage(state, persistent)?; let mut 
stmt = conn.prepare_cached("SELECT key FROM data")?; @@ -232,31 +235,3 @@ pub fn op_webstorage_iterate_keys( Ok(keys) } - -#[derive(Debug)] -pub struct DomExceptionNotSupportedError { - pub msg: String, -} - -impl DomExceptionNotSupportedError { - pub fn new(msg: &str) -> Self { - DomExceptionNotSupportedError { - msg: msg.to_string(), - } - } -} - -impl fmt::Display for DomExceptionNotSupportedError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.pad(&self.msg) - } -} - -impl std::error::Error for DomExceptionNotSupportedError {} - -pub fn get_not_supported_error_class_name( - e: &AnyError, -) -> Option<&'static str> { - e.downcast_ref::() - .map(|_| "DOMExceptionNotSupportedError") -} diff --git a/runtime/errors.rs b/runtime/errors.rs index 4c6aeab98a..bc28339ad7 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -18,6 +18,7 @@ use deno_core::url; use deno_core::ModuleResolutionError; use deno_cron::CronError; use deno_tls::TlsError; +use deno_webstorage::WebStorageError; use std::env; use std::error::Error; use std::io; @@ -158,6 +159,15 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_webstorage_class_name(e: &WebStorageError) -> &'static str { + match e { + WebStorageError::ContextNotSupported => "DOMExceptionNotSupportedError", + WebStorageError::Sqlite(_) => todo!(), + WebStorageError::Io(e) => get_io_error_class(e), + WebStorageError::StorageExceeded => "DOMExceptionQuotaExceededError", + } +} + fn get_tls_error_class(e: &TlsError) -> &'static str { match e { TlsError::Rustls(_) => "Error", @@ -221,7 +231,6 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { deno_core::error::get_custom_error_class(e) .or_else(|| deno_webgpu::error::get_error_class_name(e)) .or_else(|| deno_web::get_error_class_name(e)) - .or_else(|| deno_webstorage::get_not_supported_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) .or_else(|| e.downcast_ref::().map(get_tls_error_class)) .or_else(|| e.downcast_ref::().map(get_cron_error_class)) @@ -231,6 +240,10 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { e.downcast_ref::() .map(get_broadcast_channel_error) }) + .or_else(|| { + e.downcast_ref::() + .map(get_webstorage_class_name) + }) .or_else(|| { e.downcast_ref::() .map(get_dlopen_error_class) diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index b9fd066543..61e5ef3e0a 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -10,6 +10,7 @@ use crate::web_worker::WorkerControlEvent; use crate::web_worker::WorkerId; use crate::web_worker::WorkerMetadata; use crate::worker::FormatJsErrorFn; +use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde::Deserialize; @@ -136,12 +137,10 @@ fn op_create_worker( let worker_type = args.worker_type; if let WebWorkerType::Classic = worker_type { if let TestingFeaturesEnabled(false) = state.borrow() { - return Err( - deno_webstorage::DomExceptionNotSupportedError::new( - "Classic workers are not supported.", - ) - .into(), - ); + return Err(custom_error( + "DOMExceptionNotSupportedError", + "Classic workers are not supported.", + )); } } From 48cbf85add1a9a5c3e583c68c80d16420e606502 Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 14 Oct 2024 14:15:31 -0700 Subject: [PATCH 17/52] refactor(ext/url): use concrete error types (#26172) --- Cargo.lock | 1 + ext/url/Cargo.toml | 1 + ext/url/lib.rs | 2 ++ ext/url/urlpattern.rs | 28 
++++++++++++++-------------- runtime/errors.rs | 4 ++++ 5 files changed, 22 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a8e8608ddf..504182136c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2163,6 +2163,7 @@ dependencies = [ "deno_console", "deno_core", "deno_webidl", + "thiserror", "urlpattern", ] diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 13aca99539..ca4fa444da 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +thiserror.workspace = true urlpattern = "0.3.0" [dev-dependencies] diff --git a/ext/url/lib.rs b/ext/url/lib.rs index 6869d656b5..f8946532ae 100644 --- a/ext/url/lib.rs +++ b/ext/url/lib.rs @@ -15,6 +15,8 @@ use std::path::PathBuf; use crate::urlpattern::op_urlpattern_parse; use crate::urlpattern::op_urlpattern_process_match_input; +pub use urlpattern::UrlPatternError; + deno_core::extension!( deno_url, deps = [deno_webidl], diff --git a/ext/url/urlpattern.rs b/ext/url/urlpattern.rs index b6d9a13828..7d4e8ee71b 100644 --- a/ext/url/urlpattern.rs +++ b/ext/url/urlpattern.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use urlpattern::quirks; @@ -9,21 +7,23 @@ use urlpattern::quirks::MatchInput; use urlpattern::quirks::StringOrInit; use urlpattern::quirks::UrlPattern; +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct UrlPatternError(urlpattern::Error); + #[op2] #[serde] pub fn op_urlpattern_parse( #[serde] input: StringOrInit, #[string] base_url: Option, #[serde] options: urlpattern::UrlPatternOptions, -) -> Result { - let init = urlpattern::quirks::process_construct_pattern_input( - input, - base_url.as_deref(), - ) - .map_err(|e| type_error(e.to_string()))?; +) -> Result { + let init = + quirks::process_construct_pattern_input(input, base_url.as_deref()) + .map_err(UrlPatternError)?; - let pattern = urlpattern::quirks::parse_pattern(init, options) - .map_err(|e| type_error(e.to_string()))?; + let pattern = + quirks::parse_pattern(init, options).map_err(UrlPatternError)?; Ok(pattern) } @@ -33,14 +33,14 @@ pub fn op_urlpattern_parse( pub fn op_urlpattern_process_match_input( #[serde] input: StringOrInit, #[string] base_url: Option, -) -> Result, AnyError> { - let res = urlpattern::quirks::process_match_input(input, base_url.as_deref()) - .map_err(|e| type_error(e.to_string()))?; +) -> Result, UrlPatternError> { + let res = quirks::process_match_input(input, base_url.as_deref()) + .map_err(UrlPatternError)?; let (input, inputs) = match res { Some((input, inputs)) => (input, inputs), None => return Ok(None), }; - Ok(urlpattern::quirks::parse_match_input(input).map(|input| (input, inputs))) + Ok(quirks::parse_match_input(input).map(|input| (input, inputs))) } diff --git a/runtime/errors.rs b/runtime/errors.rs index bc28339ad7..5735635957 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -244,6 +244,10 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { e.downcast_ref::() .map(get_webstorage_class_name) }) + .or_else(|| { + e.downcast_ref::() + .map(|_| "TypeError") + }) .or_else(|| { e.downcast_ref::() .map(get_dlopen_error_class) From 8dbe77dd29a3791db58fda392dea45d8249b6bc9 Mon Sep 17 00:00:00 2001 From: Mohammad Sulaiman Date: Tue, 15 Oct 2024 01:04:18 +0300 Subject: [PATCH 18/52] fix(console/ext/repl): support using parseFloat() (#25900) Fixes #21428 Co-authored-by: tannal --- 
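A rough sketch of the user-visible change: the %d/%i and %f specifiers now coerce their argument through NumberParseInt/NumberParseFloat and only treat symbols as NaN. The expected outputs below are taken from the updated assertions in tests/unit/console_test.ts later in this patch:

  console.log("%i", "42");          // 42   (previously NaN)
  console.log("%f", "42");          // 42   (previously NaN)
  console.log("%f", 5n);            // 5    (previously NaN)
  console.log("%i", 12345678901234567890123n); // 1.2345678901234568e+22 (previously 12345678901234567890123n)
  console.log("%f", Symbol("foo")); // NaN  (symbols still format as NaN)
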
ext/console/01_console.js | 15 +++++++-------- tests/integration/repl_tests.rs | 14 ++++++++++++++ .../repl/console_log/093_console_log_format.js | 16 ++++++++++++++++ .../repl/console_log/093_console_log_format.out | 1 + tests/specs/repl/console_log/__test__.jsonc | 4 ++++ tests/unit/console_test.ts | 8 ++++---- 6 files changed, 46 insertions(+), 12 deletions(-) create mode 100644 tests/specs/repl/console_log/093_console_log_format.js create mode 100644 tests/specs/repl/console_log/093_console_log_format.out create mode 100644 tests/specs/repl/console_log/__test__.jsonc diff --git a/ext/console/01_console.js b/ext/console/01_console.js index d9acc958a0..a977a27195 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -84,6 +84,7 @@ const { NumberIsInteger, NumberIsNaN, NumberParseInt, + NumberParseFloat, NumberPrototypeToFixed, NumberPrototypeToString, NumberPrototypeValueOf, @@ -3010,20 +3011,18 @@ function inspectArgs(args, inspectOptions = { __proto__: null }) { } else if (ArrayPrototypeIncludes(["d", "i"], char)) { // Format as an integer. const value = args[a++]; - if (typeof value == "bigint") { - formattedArg = `${value}n`; - } else if (typeof value == "number") { - formattedArg = `${NumberParseInt(String(value))}`; - } else { + if (typeof value === "symbol") { formattedArg = "NaN"; + } else { + formattedArg = `${NumberParseInt(value)}`; } } else if (char == "f") { // Format as a floating point value. const value = args[a++]; - if (typeof value == "number") { - formattedArg = `${value}`; - } else { + if (typeof value === "symbol") { formattedArg = "NaN"; + } else { + formattedArg = `${NumberParseFloat(value)}`; } } else if (ArrayPrototypeIncludes(["O", "o"], char)) { // Format as an object. diff --git a/tests/integration/repl_tests.rs b/tests/integration/repl_tests.rs index 4e00398ce6..9eceb2f05d 100644 --- a/tests/integration/repl_tests.rs +++ b/tests/integration/repl_tests.rs @@ -255,6 +255,20 @@ fn console_log() { console.write_line("'world'"); console.expect("\"world\""); }); + + // https://github.com/denoland/deno/issues/21428 + let (out, err) = util::run_and_collect_output_with_args( + true, + vec![ + "repl", + "--eval-file=./../specs/repl/console_log/093_console_log_format.js", + ], + None, + None, + false, + ); + assert_contains!(out, "0.5"); + assert!(err.is_empty()); } #[test] diff --git a/tests/specs/repl/console_log/093_console_log_format.js b/tests/specs/repl/console_log/093_console_log_format.js new file mode 100644 index 0000000000..15022411ca --- /dev/null +++ b/tests/specs/repl/console_log/093_console_log_format.js @@ -0,0 +1,16 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+class Frac { + constructor(num, den) { + this.num = num; + this.den = den; + } + [Symbol.toPrimitive]() { + return this.num / this.den; + } + display() { + const result = this.num / this.den; + process.stdout.write(`${result}`); + } +} +const f = new Frac(1, 2); +f.display(); diff --git a/tests/specs/repl/console_log/093_console_log_format.out b/tests/specs/repl/console_log/093_console_log_format.out new file mode 100644 index 0000000000..ea2303bc03 --- /dev/null +++ b/tests/specs/repl/console_log/093_console_log_format.out @@ -0,0 +1 @@ +0.5 \ No newline at end of file diff --git a/tests/specs/repl/console_log/__test__.jsonc b/tests/specs/repl/console_log/__test__.jsonc new file mode 100644 index 0000000000..641bc1f68a --- /dev/null +++ b/tests/specs/repl/console_log/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A --quiet 093_console_log_format.js", + "output": "093_console_log_format.out" +} diff --git a/tests/unit/console_test.ts b/tests/unit/console_test.ts index 878d17ae33..42cff7c705 100644 --- a/tests/unit/console_test.ts +++ b/tests/unit/console_test.ts @@ -1162,7 +1162,7 @@ Deno.test(function consoleTestWithIntegerFormatSpecifier() { assertEquals(stringify("%i"), "%i"); assertEquals(stringify("%i", 42.0), "42"); assertEquals(stringify("%i", 42), "42"); - assertEquals(stringify("%i", "42"), "NaN"); + assertEquals(stringify("%i", "42"), "42"); assertEquals(stringify("%i", 1.5), "1"); assertEquals(stringify("%i", -0.5), "0"); assertEquals(stringify("%i", ""), "NaN"); @@ -1172,7 +1172,7 @@ Deno.test(function consoleTestWithIntegerFormatSpecifier() { assertEquals(stringify("%d", 12345678901234567890123), "1"); assertEquals( stringify("%i", 12345678901234567890123n), - "12345678901234567890123n", + "1.2345678901234568e+22", ); }); @@ -1180,13 +1180,13 @@ Deno.test(function consoleTestWithFloatFormatSpecifier() { assertEquals(stringify("%f"), "%f"); assertEquals(stringify("%f", 42.0), "42"); assertEquals(stringify("%f", 42), "42"); - assertEquals(stringify("%f", "42"), "NaN"); + assertEquals(stringify("%f", "42"), "42"); assertEquals(stringify("%f", 1.5), "1.5"); assertEquals(stringify("%f", -0.5), "-0.5"); assertEquals(stringify("%f", Math.PI), "3.141592653589793"); assertEquals(stringify("%f", ""), "NaN"); assertEquals(stringify("%f", Symbol("foo")), "NaN"); - assertEquals(stringify("%f", 5n), "NaN"); + assertEquals(stringify("%f", 5n), "5"); assertEquals(stringify("%f %f", 42, 43), "42 43"); assertEquals(stringify("%f %f", 42), "42 %f"); }); From ee7d4501435f0ebd655c8b50bd6e41ca19e71abc Mon Sep 17 00:00:00 2001 From: Leo Kettmeir Date: Mon, 14 Oct 2024 15:05:49 -0700 Subject: [PATCH 19/52] refactor(ext/ffi): use concrete error types (#26170) --- Cargo.lock | 2 + ext/ffi/Cargo.toml | 2 + ext/ffi/call.rs | 54 ++++++++---- ext/ffi/callback.rs | 51 +++++++---- ext/ffi/dlfcn.rs | 48 +++++++--- ext/ffi/ir.rs | 142 ++++++++++++++++------------- ext/ffi/lib.rs | 7 ++ ext/ffi/repr.rs | 211 ++++++++++++++++++++++++++++++-------------- ext/ffi/static.rs | 31 +++++-- runtime/errors.rs | 80 +++++++++++++++++ 10 files changed, 444 insertions(+), 184 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 504182136c..56121705b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1587,6 +1587,8 @@ dependencies = [ "serde", "serde-value", "serde_json", + "thiserror", + "tokio", "winapi", ] diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 23a2aee1cd..496e8634ec 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -24,6 +24,8 @@ log.workspace = true serde.workspace = true serde-value = 
"0.7" serde_json = "1.0" +thiserror.workspace = true +tokio.workspace = true [target.'cfg(windows)'.dependencies] winapi = { workspace = true, features = ["errhandlingapi", "minwindef", "ntdef", "winbase", "winnt"] } diff --git a/ext/ffi/call.rs b/ext/ffi/call.rs index 3572b9e813..d337b29b00 100644 --- a/ext/ffi/call.rs +++ b/ext/ffi/call.rs @@ -7,9 +7,6 @@ use crate::symbol::NativeType; use crate::symbol::Symbol; use crate::FfiPermissions; use crate::ForeignFunction; -use deno_core::anyhow::anyhow; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde_json::Value; use deno_core::serde_v8::ExternalPointer; @@ -24,6 +21,20 @@ use std::ffi::c_void; use std::future::Future; use std::rc::Rc; +#[derive(Debug, thiserror::Error)] +pub enum CallError { + #[error(transparent)] + IR(#[from] IRError), + #[error("Nonblocking FFI call failed: {0}")] + NonblockingCallFailure(#[source] tokio::task::JoinError), + #[error("Invalid FFI symbol name: '{0}'")] + InvalidSymbol(String), + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error(transparent)] + Callback(#[from] super::CallbackError), +} + // SAFETY: Makes an FFI call unsafe fn ffi_call_rtype_struct( cif: &libffi::middle::Cif, @@ -45,7 +56,7 @@ pub(crate) fn ffi_call_sync<'scope>( args: v8::FunctionCallbackArguments, symbol: &Symbol, out_buffer: Option, -) -> Result +) -> Result where 'scope: 'scope, { @@ -201,7 +212,7 @@ fn ffi_call( parameter_types: &[NativeType], result_type: NativeType, out_buffer: Option, -) -> Result { +) -> FfiValue { let call_args: Vec = call_args .iter() .enumerate() @@ -214,7 +225,7 @@ fn ffi_call( // SAFETY: types in the `Cif` match the actual calling convention and // types of symbol. unsafe { - Ok(match result_type { + match result_type { NativeType::Void => { cif.call::<()>(fun_ptr, &call_args); FfiValue::Value(Value::from(())) @@ -267,7 +278,7 @@ fn ffi_call( ffi_call_rtype_struct(cif, &fun_ptr, call_args, out_buffer.unwrap().0); FfiValue::Value(Value::Null) } - }) + } } } @@ -280,14 +291,16 @@ pub fn op_ffi_call_ptr_nonblocking( #[serde] def: ForeignFunction, parameters: v8::Local, out_buffer: Option>, -) -> Result>, AnyError> +) -> Result>, CallError> where FP: FfiPermissions + 'static, { { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(CallError::Permission)?; }; let symbol = PtrSymbol::new(pointer, &def)?; @@ -309,7 +322,7 @@ where Ok(async move { let result = join_handle .await - .map_err(|err| anyhow!("Nonblocking FFI call failed: {}", err))??; + .map_err(CallError::NonblockingCallFailure)?; // SAFETY: Same return type declared to libffi; trust user to have it right beyond that. Ok(result) }) @@ -325,16 +338,17 @@ pub fn op_ffi_call_nonblocking( #[string] symbol: String, parameters: v8::Local, out_buffer: Option>, -) -> Result>, AnyError> { +) -> Result>, CallError> { let symbol = { let state = state.borrow(); - let resource = state.resource_table.get::(rid)?; + let resource = state + .resource_table + .get::(rid) + .map_err(CallError::Permission)?; let symbols = &resource.symbols; *symbols .get(&symbol) - .ok_or_else(|| { - type_error(format!("Invalid FFI symbol name: '{symbol}'")) - })? + .ok_or_else(|| CallError::InvalidSymbol(symbol))? 
.clone() }; @@ -362,7 +376,7 @@ pub fn op_ffi_call_nonblocking( Ok(async move { let result = join_handle .await - .map_err(|err| anyhow!("Nonblocking FFI call failed: {}", err))??; + .map_err(CallError::NonblockingCallFailure)?; // SAFETY: Same return type declared to libffi; trust user to have it right beyond that. Ok(result) }) @@ -377,14 +391,16 @@ pub fn op_ffi_call_ptr( #[serde] def: ForeignFunction, parameters: v8::Local, out_buffer: Option>, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(CallError::Permission)?; }; let symbol = PtrSymbol::new(pointer, &def)?; @@ -399,7 +415,7 @@ where &def.parameters, def.result.clone(), out_buffer_ptr, - )?; + ); // SAFETY: Same return type declared to libffi; trust user to have it right beyond that. Ok(result) } diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index 6fa166f52b..f33e0413a3 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -3,7 +3,6 @@ use crate::symbol::NativeType; use crate::FfiPermissions; use crate::ForeignFunction; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::v8::TryCatch; @@ -34,6 +33,16 @@ thread_local! { static LOCAL_THREAD_ID: RefCell = const { RefCell::new(0) }; } +#[derive(Debug, thiserror::Error)] +pub enum CallbackError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + #[derive(Clone)] pub struct PtrSymbol { pub cif: libffi::middle::Cif, @@ -44,7 +53,7 @@ impl PtrSymbol { pub fn new( fn_ptr: *mut c_void, def: &ForeignFunction, - ) -> Result { + ) -> Result { let ptr = libffi::middle::CodePtr::from_ptr(fn_ptr as _); let cif = libffi::middle::Cif::new( def @@ -52,8 +61,13 @@ impl PtrSymbol { .clone() .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>()?, - def.result.clone().try_into()?, + .collect::, _>>() + .map_err(CallbackError::Other)?, + def + .result + .clone() + .try_into() + .map_err(CallbackError::Other)?, ); Ok(Self { cif, ptr }) @@ -522,10 +536,12 @@ unsafe fn do_ffi_callback( pub fn op_ffi_unsafe_callback_ref( state: Rc>, #[smi] rid: ResourceId, -) -> Result>, AnyError> { +) -> Result, CallbackError> { let state = state.borrow(); - let callback_resource = - state.resource_table.get::(rid)?; + let callback_resource = state + .resource_table + .get::(rid) + .map_err(CallbackError::Resource)?; Ok(async move { let info: &mut CallbackInfo = @@ -536,7 +552,6 @@ pub fn op_ffi_unsafe_callback_ref( .into_future() .or_cancel(callback_resource.cancel.clone()) .await; - Ok(()) }) } @@ -552,12 +567,14 @@ pub fn op_ffi_unsafe_callback_create( scope: &mut v8::HandleScope<'scope>, #[serde] args: RegisterCallbackArgs, cb: v8::Local, -) -> Result, AnyError> +) -> Result, CallbackError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(CallbackError::Permission)?; let thread_id: u32 = LOCAL_THREAD_ID.with(|s| { let value = *s.borrow(); @@ -593,8 +610,10 @@ where .parameters .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>()?, - libffi::middle::Type::try_from(args.result)?, + .collect::, _>>() + .map_err(CallbackError::Other)?, + libffi::middle::Type::try_from(args.result) + 
.map_err(CallbackError::Other)?, ); // SAFETY: CallbackInfo is leaked, is not null and stays valid as long as the callback exists. @@ -624,14 +643,16 @@ pub fn op_ffi_unsafe_callback_close( state: &mut OpState, scope: &mut v8::HandleScope, #[smi] rid: ResourceId, -) -> Result<(), AnyError> { +) -> Result<(), CallbackError> { // SAFETY: This drops the closure and the callback info associated with it. // Any retained function pointers to the closure become dangling pointers. // It is up to the user to know that it is safe to call the `close()` on the // UnsafeCallback instance. unsafe { - let callback_resource = - state.resource_table.take::(rid)?; + let callback_resource = state + .resource_table + .take::(rid) + .map_err(CallbackError::Resource)?; let info = Box::from_raw(callback_resource.info); let _ = v8::Global::from_raw(scope, info.callback); let _ = v8::Global::from_raw(scope, info.context); diff --git a/ext/ffi/dlfcn.rs b/ext/ffi/dlfcn.rs index 10199bf858..53bdcbc5cc 100644 --- a/ext/ffi/dlfcn.rs +++ b/ext/ffi/dlfcn.rs @@ -6,8 +6,6 @@ use crate::symbol::Symbol; use crate::turbocall; use crate::turbocall::Turbocall; use crate::FfiPermissions; -use deno_core::error::generic_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::GarbageCollected; @@ -21,6 +19,22 @@ use std::collections::HashMap; use std::ffi::c_void; use std::rc::Rc; +#[derive(Debug, thiserror::Error)] +pub enum DlfcnError { + #[error("Failed to register symbol {symbol}: {error}")] + RegisterSymbol { + symbol: String, + #[source] + error: dlopen2::Error, + }, + #[error(transparent)] + Dlopen(#[from] dlopen2::Error), + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + pub struct DynamicLibraryResource { lib: Library, pub symbols: HashMap>, @@ -37,7 +51,7 @@ impl Resource for DynamicLibraryResource { } impl DynamicLibraryResource { - pub fn get_static(&self, symbol: String) -> Result<*mut c_void, AnyError> { + pub fn get_static(&self, symbol: String) -> Result<*mut c_void, DlfcnError> { // By default, Err returned by this function does not tell // which symbol wasn't exported. So we'll modify the error // message to include the name of symbol. @@ -45,9 +59,7 @@ impl DynamicLibraryResource { // SAFETY: The obtained T symbol is the size of a pointer. match unsafe { self.lib.symbol::<*mut c_void>(&symbol) } { Ok(value) => Ok(Ok(value)), - Err(err) => Err(generic_error(format!( - "Failed to register symbol {symbol}: {err}" - ))), + Err(error) => Err(DlfcnError::RegisterSymbol { symbol, error }), }? } } @@ -116,12 +128,14 @@ pub fn op_ffi_load<'scope, FP>( scope: &mut v8::HandleScope<'scope>, state: &mut OpState, #[serde] args: FfiLoadArgs, -) -> Result, AnyError> +) -> Result, DlfcnError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - let path = permissions.check_partial_with_path(&args.path)?; + let path = permissions + .check_partial_with_path(&args.path) + .map_err(DlfcnError::Permission)?; let lib = Library::open(&path).map_err(|e| { dlopen2::Error::OpeningLibraryError(std::io::Error::new( @@ -152,15 +166,16 @@ where // SAFETY: The obtained T symbol is the size of a pointer. 
match unsafe { resource.lib.symbol::<*const c_void>(symbol) } { Ok(value) => Ok(value), - Err(err) => if foreign_fn.optional { + Err(error) => if foreign_fn.optional { let null: v8::Local = v8::null(scope).into(); let func_key = v8::String::new(scope, &symbol_key).unwrap(); obj.set(scope, func_key.into(), null); break 'register_symbol; } else { - Err(generic_error(format!( - "Failed to register symbol {symbol}: {err}" - ))) + Err(DlfcnError::RegisterSymbol { + symbol: symbol.to_owned(), + error, + }) }, }?; @@ -171,8 +186,13 @@ where .clone() .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>()?, - foreign_fn.result.clone().try_into()?, + .collect::, _>>() + .map_err(DlfcnError::Other)?, + foreign_fn + .result + .clone() + .try_into() + .map_err(DlfcnError::Other)?, ); let func_key = v8::String::new(scope, &symbol_key).unwrap(); diff --git a/ext/ffi/ir.rs b/ext/ffi/ir.rs index ebf64945b4..2e80842166 100644 --- a/ext/ffi/ir.rs +++ b/ext/ffi/ir.rs @@ -1,13 +1,55 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use crate::symbol::NativeType; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::v8; use libffi::middle::Arg; use std::ffi::c_void; use std::ptr; +#[derive(Debug, thiserror::Error)] +pub enum IRError { + #[error("Invalid FFI u8 type, expected boolean")] + InvalidU8ExpectedBoolean, + #[error("Invalid FFI u8 type, expected unsigned integer")] + InvalidU8ExpectedUnsignedInteger, + #[error("Invalid FFI i8 type, expected integer")] + InvalidI8, + #[error("Invalid FFI u16 type, expected unsigned integer")] + InvalidU16, + #[error("Invalid FFI i16 type, expected integer")] + InvalidI16, + #[error("Invalid FFI u32 type, expected unsigned integer")] + InvalidU32, + #[error("Invalid FFI i32 type, expected integer")] + InvalidI32, + #[error("Invalid FFI u64 type, expected unsigned integer")] + InvalidU64, + #[error("Invalid FFI i64 type, expected integer")] + InvalidI64, + #[error("Invalid FFI usize type, expected unsigned integer")] + InvalidUsize, + #[error("Invalid FFI isize type, expected integer")] + InvalidIsize, + #[error("Invalid FFI f32 type, expected number")] + InvalidF32, + #[error("Invalid FFI f64 type, expected number")] + InvalidF64, + #[error("Invalid FFI pointer type, expected null, or External")] + InvalidPointerType, + #[error( + "Invalid FFI buffer type, expected null, ArrayBuffer, or ArrayBufferView" + )] + InvalidBufferType, + #[error("Invalid FFI ArrayBufferView, expected data in the buffer")] + InvalidArrayBufferView, + #[error("Invalid FFI ArrayBuffer, expected data in buffer")] + InvalidArrayBuffer, + #[error("Invalid FFI struct type, expected ArrayBuffer, or ArrayBufferView")] + InvalidStructType, + #[error("Invalid FFI function type, expected null, or External")] + InvalidFunctionType, +} + pub struct OutBuffer(pub *mut u8); // SAFETY: OutBuffer is allocated by us in 00_ffi.js and is guaranteed to be @@ -126,9 +168,9 @@ unsafe impl Send for NativeValue {} #[inline] pub fn ffi_parse_bool_arg( arg: v8::Local, -) -> Result { +) -> Result { let bool_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI u8 type, expected boolean"))? + .map_err(|_| IRError::InvalidU8ExpectedBoolean)? .is_true(); Ok(NativeValue { bool_value }) } @@ -136,9 +178,9 @@ pub fn ffi_parse_bool_arg( #[inline] pub fn ffi_parse_u8_arg( arg: v8::Local, -) -> Result { +) -> Result { let u8_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI u8 type, expected unsigned integer"))? 
+ .map_err(|_| IRError::InvalidU8ExpectedUnsignedInteger)? .value() as u8; Ok(NativeValue { u8_value }) } @@ -146,9 +188,9 @@ pub fn ffi_parse_u8_arg( #[inline] pub fn ffi_parse_i8_arg( arg: v8::Local, -) -> Result { +) -> Result { let i8_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI i8 type, expected integer"))? + .map_err(|_| IRError::InvalidI8)? .value() as i8; Ok(NativeValue { i8_value }) } @@ -156,9 +198,9 @@ pub fn ffi_parse_i8_arg( #[inline] pub fn ffi_parse_u16_arg( arg: v8::Local, -) -> Result { +) -> Result { let u16_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI u16 type, expected unsigned integer"))? + .map_err(|_| IRError::InvalidU16)? .value() as u16; Ok(NativeValue { u16_value }) } @@ -166,9 +208,9 @@ pub fn ffi_parse_u16_arg( #[inline] pub fn ffi_parse_i16_arg( arg: v8::Local, -) -> Result { +) -> Result { let i16_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI i16 type, expected integer"))? + .map_err(|_| IRError::InvalidI16)? .value() as i16; Ok(NativeValue { i16_value }) } @@ -176,9 +218,9 @@ pub fn ffi_parse_i16_arg( #[inline] pub fn ffi_parse_u32_arg( arg: v8::Local, -) -> Result { +) -> Result { let u32_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI u32 type, expected unsigned integer"))? + .map_err(|_| IRError::InvalidU32)? .value(); Ok(NativeValue { u32_value }) } @@ -186,9 +228,9 @@ pub fn ffi_parse_u32_arg( #[inline] pub fn ffi_parse_i32_arg( arg: v8::Local, -) -> Result { +) -> Result { let i32_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI i32 type, expected integer"))? + .map_err(|_| IRError::InvalidI32)? .value(); Ok(NativeValue { i32_value }) } @@ -197,7 +239,7 @@ pub fn ffi_parse_i32_arg( pub fn ffi_parse_u64_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case. // 2. Number: Common, supported by Fast API, so let that be the optimal case. @@ -207,9 +249,7 @@ pub fn ffi_parse_u64_arg( } else if let Ok(value) = v8::Local::::try_from(arg) { value.integer_value(scope).unwrap() as u64 } else { - return Err(type_error( - "Invalid FFI u64 type, expected unsigned integer", - )); + return Err(IRError::InvalidU64); }; Ok(NativeValue { u64_value }) } @@ -218,7 +258,7 @@ pub fn ffi_parse_u64_arg( pub fn ffi_parse_i64_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case. // 2. Number: Common, supported by Fast API, so let that be the optimal case. @@ -228,7 +268,7 @@ pub fn ffi_parse_i64_arg( } else if let Ok(value) = v8::Local::::try_from(arg) { value.integer_value(scope).unwrap() } else { - return Err(type_error("Invalid FFI i64 type, expected integer")); + return Err(IRError::InvalidI64); }; Ok(NativeValue { i64_value }) } @@ -237,7 +277,7 @@ pub fn ffi_parse_i64_arg( pub fn ffi_parse_usize_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case. // 2. Number: Common, supported by Fast API, so let that be the optimal case. 
@@ -247,7 +287,7 @@ pub fn ffi_parse_usize_arg( } else if let Ok(value) = v8::Local::::try_from(arg) { value.integer_value(scope).unwrap() as usize } else { - return Err(type_error("Invalid FFI usize type, expected integer")); + return Err(IRError::InvalidUsize); }; Ok(NativeValue { usize_value }) } @@ -256,7 +296,7 @@ pub fn ffi_parse_usize_arg( pub fn ffi_parse_isize_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case. // 2. Number: Common, supported by Fast API, so let that be the optimal case. @@ -266,7 +306,7 @@ pub fn ffi_parse_isize_arg( } else if let Ok(value) = v8::Local::::try_from(arg) { value.integer_value(scope).unwrap() as isize } else { - return Err(type_error("Invalid FFI isize type, expected integer")); + return Err(IRError::InvalidIsize); }; Ok(NativeValue { isize_value }) } @@ -274,9 +314,9 @@ pub fn ffi_parse_isize_arg( #[inline] pub fn ffi_parse_f32_arg( arg: v8::Local, -) -> Result { +) -> Result { let f32_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI f32 type, expected number"))? + .map_err(|_| IRError::InvalidF32)? .value() as f32; Ok(NativeValue { f32_value }) } @@ -284,9 +324,9 @@ pub fn ffi_parse_f32_arg( #[inline] pub fn ffi_parse_f64_arg( arg: v8::Local, -) -> Result { +) -> Result { let f64_value = v8::Local::::try_from(arg) - .map_err(|_| type_error("Invalid FFI f64 type, expected number"))? + .map_err(|_| IRError::InvalidF64)? .value(); Ok(NativeValue { f64_value }) } @@ -295,15 +335,13 @@ pub fn ffi_parse_f64_arg( pub fn ffi_parse_pointer_arg( _scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { let pointer = if let Ok(value) = v8::Local::::try_from(arg) { value.value() } else if arg.is_null() { ptr::null_mut() } else { - return Err(type_error( - "Invalid FFI pointer type, expected null, or External", - )); + return Err(IRError::InvalidPointerType); }; Ok(NativeValue { pointer }) } @@ -312,7 +350,7 @@ pub fn ffi_parse_pointer_arg( pub fn ffi_parse_buffer_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. ArrayBuffer: Fairly common and not supported by Fast API, optimise this case. // 2. ArrayBufferView: Common and supported by Fast API @@ -328,9 +366,7 @@ pub fn ffi_parse_buffer_arg( let byte_offset = value.byte_offset(); let pointer = value .buffer(scope) - .ok_or_else(|| { - type_error("Invalid FFI ArrayBufferView, expected data in the buffer") - })? + .ok_or(IRError::InvalidArrayBufferView)? .data(); if let Some(non_null) = pointer { // SAFETY: Pointer is non-null, and V8 guarantees that the byte_offset @@ -342,9 +378,7 @@ pub fn ffi_parse_buffer_arg( } else if arg.is_null() { ptr::null_mut() } else { - return Err(type_error( - "Invalid FFI buffer type, expected null, ArrayBuffer, or ArrayBufferView", - )); + return Err(IRError::InvalidBufferType); }; Ok(NativeValue { pointer }) } @@ -353,7 +387,7 @@ pub fn ffi_parse_buffer_arg( pub fn ffi_parse_struct_arg( scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { // Order of checking: // 1. ArrayBuffer: Fairly common and not supported by Fast API, optimise this case. // 2. 
ArrayBufferView: Common and supported by Fast API @@ -362,31 +396,23 @@ pub fn ffi_parse_struct_arg( if let Some(non_null) = value.data() { non_null.as_ptr() } else { - return Err(type_error( - "Invalid FFI ArrayBuffer, expected data in buffer", - )); + return Err(IRError::InvalidArrayBuffer); } } else if let Ok(value) = v8::Local::::try_from(arg) { let byte_offset = value.byte_offset(); let pointer = value .buffer(scope) - .ok_or_else(|| { - type_error("Invalid FFI ArrayBufferView, expected data in the buffer") - })? + .ok_or(IRError::InvalidArrayBufferView)? .data(); if let Some(non_null) = pointer { // SAFETY: Pointer is non-null, and V8 guarantees that the byte_offset // is within the buffer backing store. unsafe { non_null.as_ptr().add(byte_offset) } } else { - return Err(type_error( - "Invalid FFI ArrayBufferView, expected data in buffer", - )); + return Err(IRError::InvalidArrayBufferView); } } else { - return Err(type_error( - "Invalid FFI struct type, expected ArrayBuffer, or ArrayBufferView", - )); + return Err(IRError::InvalidStructType); }; Ok(NativeValue { pointer }) } @@ -395,15 +421,13 @@ pub fn ffi_parse_struct_arg( pub fn ffi_parse_function_arg( _scope: &mut v8::HandleScope, arg: v8::Local, -) -> Result { +) -> Result { let pointer = if let Ok(value) = v8::Local::::try_from(arg) { value.value() } else if arg.is_null() { ptr::null_mut() } else { - return Err(type_error( - "Invalid FFI function type, expected null, or External", - )); + return Err(IRError::InvalidFunctionType); }; Ok(NativeValue { pointer }) } @@ -412,7 +436,7 @@ pub fn ffi_parse_args<'scope>( scope: &mut v8::HandleScope<'scope>, args: v8::Local, parameter_types: &[NativeType], -) -> Result, AnyError> +) -> Result, IRError> where 'scope: 'scope, { diff --git a/ext/ffi/lib.rs b/ext/ffi/lib.rs index 77ec3c85e3..237f8c3b05 100644 --- a/ext/ffi/lib.rs +++ b/ext/ffi/lib.rs @@ -29,6 +29,13 @@ use repr::*; use symbol::NativeType; use symbol::Symbol; +pub use call::CallError; +pub use callback::CallbackError; +pub use dlfcn::DlfcnError; +pub use ir::IRError; +pub use r#static::StaticError; +pub use repr::ReprError; + #[cfg(not(target_pointer_width = "64"))] compile_error!("platform not supported"); diff --git a/ext/ffi/repr.rs b/ext/ffi/repr.rs index 315e6d53bc..2f04f4feb4 100644 --- a/ext/ffi/repr.rs +++ b/ext/ffi/repr.rs @@ -1,9 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
use crate::FfiPermissions; -use deno_core::error::range_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::OpState; @@ -12,16 +9,58 @@ use std::ffi::c_void; use std::ffi::CStr; use std::ptr; +#[derive(Debug, thiserror::Error)] +pub enum ReprError { + #[error("Invalid pointer to offset, pointer is null")] + InvalidOffset, + #[error("Invalid ArrayBuffer pointer, pointer is null")] + InvalidArrayBuffer, + #[error("Destination length is smaller than source length")] + DestinationLengthTooShort, + #[error("Invalid CString pointer, pointer is null")] + InvalidCString, + #[error("Invalid CString pointer, string exceeds max length")] + CStringTooLong, + #[error("Invalid bool pointer, pointer is null")] + InvalidBool, + #[error("Invalid u8 pointer, pointer is null")] + InvalidU8, + #[error("Invalid i8 pointer, pointer is null")] + InvalidI8, + #[error("Invalid u16 pointer, pointer is null")] + InvalidU16, + #[error("Invalid i16 pointer, pointer is null")] + InvalidI16, + #[error("Invalid u32 pointer, pointer is null")] + InvalidU32, + #[error("Invalid i32 pointer, pointer is null")] + InvalidI32, + #[error("Invalid u64 pointer, pointer is null")] + InvalidU64, + #[error("Invalid i64 pointer, pointer is null")] + InvalidI64, + #[error("Invalid f32 pointer, pointer is null")] + InvalidF32, + #[error("Invalid f64 pointer, pointer is null")] + InvalidF64, + #[error("Invalid pointer pointer, pointer is null")] + InvalidPointer, + #[error(transparent)] + Permission(deno_core::error::AnyError), +} + #[op2(fast)] pub fn op_ffi_ptr_create( state: &mut OpState, #[bigint] ptr_number: usize, -) -> Result<*mut c_void, AnyError> +) -> Result<*mut c_void, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; Ok(ptr_number as *mut c_void) } @@ -31,12 +70,14 @@ pub fn op_ffi_ptr_equals( state: &mut OpState, a: *const c_void, b: *const c_void, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; Ok(a == b) } @@ -45,12 +86,14 @@ where pub fn op_ffi_ptr_of( state: &mut OpState, #[anybuffer] buf: *const u8, -) -> Result<*mut c_void, AnyError> +) -> Result<*mut c_void, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; Ok(buf as *mut c_void) } @@ -59,12 +102,14 @@ where pub fn op_ffi_ptr_of_exact( state: &mut OpState, buf: v8::Local, -) -> Result<*mut c_void, AnyError> +) -> Result<*mut c_void, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; let Some(buf) = buf.get_backing_store() else { return Ok(0 as _); @@ -80,15 +125,17 @@ pub fn op_ffi_ptr_offset( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result<*mut c_void, AnyError> +) -> Result<*mut c_void, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return 
Err(type_error("Invalid pointer to offset, pointer is null")); + return Err(ReprError::InvalidOffset); } // TODO(mmastrac): Create a RawPointer that can safely do pointer math. @@ -110,12 +157,14 @@ unsafe extern "C" fn noop_deleter_callback( pub fn op_ffi_ptr_value( state: &mut OpState, ptr: *mut c_void, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; Ok(ptr as usize) } @@ -127,15 +176,17 @@ pub fn op_ffi_get_buf( ptr: *mut c_void, #[number] offset: isize, #[number] len: usize, -) -> Result, AnyError> +) -> Result, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid ArrayBuffer pointer, pointer is null")); + return Err(ReprError::InvalidArrayBuffer); } // SAFETY: Trust the user to have provided a real pointer, offset, and a valid matching size to it. Since this is a foreign pointer, we should not do any deletion. @@ -144,7 +195,7 @@ where ptr.offset(offset), len, noop_deleter_callback, - std::ptr::null_mut(), + ptr::null_mut(), ) } .make_shared(); @@ -159,19 +210,19 @@ pub fn op_ffi_buf_copy_into( #[number] offset: isize, #[anybuffer] dst: &mut [u8], #[number] len: usize, -) -> Result<(), AnyError> +) -> Result<(), ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if src.is_null() { - Err(type_error("Invalid ArrayBuffer pointer, pointer is null")) + Err(ReprError::InvalidArrayBuffer) } else if dst.len() < len { - Err(range_error( - "Destination length is smaller than source length", - )) + Err(ReprError::DestinationLengthTooShort) } else { let src = src as *const c_void; @@ -190,24 +241,24 @@ pub fn op_ffi_cstr_read( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result, AnyError> +) -> Result, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid CString pointer, pointer is null")); + return Err(ReprError::InvalidCString); } let cstr = // SAFETY: Pointer and offset are user provided. unsafe { CStr::from_ptr(ptr.offset(offset) as *const c_char) }.to_bytes(); let value = v8::String::new_from_utf8(scope, cstr, v8::NewStringType::Normal) - .ok_or_else(|| { - type_error("Invalid CString pointer, string exceeds max length") - })?; + .ok_or_else(|| ReprError::CStringTooLong)?; Ok(value) } @@ -216,15 +267,17 @@ pub fn op_ffi_read_bool( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid bool pointer, pointer is null")); + return Err(ReprError::InvalidBool); } // SAFETY: ptr and offset are user provided. 
@@ -236,15 +289,17 @@ pub fn op_ffi_read_u8( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid u8 pointer, pointer is null")); + return Err(ReprError::InvalidU8); } // SAFETY: ptr and offset are user provided. @@ -258,15 +313,17 @@ pub fn op_ffi_read_i8( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid i8 pointer, pointer is null")); + return Err(ReprError::InvalidI8); } // SAFETY: ptr and offset are user provided. @@ -280,15 +337,17 @@ pub fn op_ffi_read_u16( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid u16 pointer, pointer is null")); + return Err(ReprError::InvalidU16); } // SAFETY: ptr and offset are user provided. @@ -302,15 +361,17 @@ pub fn op_ffi_read_i16( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid i16 pointer, pointer is null")); + return Err(ReprError::InvalidI16); } // SAFETY: ptr and offset are user provided. @@ -324,15 +385,17 @@ pub fn op_ffi_read_u32( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid u32 pointer, pointer is null")); + return Err(ReprError::InvalidU32); } // SAFETY: ptr and offset are user provided. @@ -344,15 +407,17 @@ pub fn op_ffi_read_i32( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid i32 pointer, pointer is null")); + return Err(ReprError::InvalidI32); } // SAFETY: ptr and offset are user provided. @@ -367,15 +432,17 @@ pub fn op_ffi_read_u64( // Note: The representation of 64-bit integers is function-wide. We cannot // choose to take this parameter as a number while returning a bigint. 
#[bigint] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid u64 pointer, pointer is null")); + return Err(ReprError::InvalidU64); } let value = @@ -393,15 +460,17 @@ pub fn op_ffi_read_i64( // Note: The representation of 64-bit integers is function-wide. We cannot // choose to take this parameter as a number while returning a bigint. #[bigint] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid i64 pointer, pointer is null")); + return Err(ReprError::InvalidI64); } let value = @@ -416,15 +485,17 @@ pub fn op_ffi_read_f32( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid f32 pointer, pointer is null")); + return Err(ReprError::InvalidF32); } // SAFETY: ptr and offset are user provided. @@ -436,15 +507,17 @@ pub fn op_ffi_read_f64( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result +) -> Result where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid f64 pointer, pointer is null")); + return Err(ReprError::InvalidF64); } // SAFETY: ptr and offset are user provided. @@ -456,15 +529,17 @@ pub fn op_ffi_read_ptr( state: &mut OpState, ptr: *mut c_void, #[number] offset: isize, -) -> Result<*mut c_void, AnyError> +) -> Result<*mut c_void, ReprError> where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions.check_partial_no_path()?; + permissions + .check_partial_no_path() + .map_err(ReprError::Permission)?; if ptr.is_null() { - return Err(type_error("Invalid pointer pointer, pointer is null")); + return Err(ReprError::InvalidPointer); } // SAFETY: ptr and offset are user provided. 
diff --git a/ext/ffi/static.rs b/ext/ffi/static.rs index f08605754b..61b4059336 100644 --- a/ext/ffi/static.rs +++ b/ext/ffi/static.rs @@ -2,14 +2,24 @@ use crate::dlfcn::DynamicLibraryResource; use crate::symbol::NativeType; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::OpState; use deno_core::ResourceId; use std::ptr; +#[derive(Debug, thiserror::Error)] +pub enum StaticError { + #[error(transparent)] + Dlfcn(super::DlfcnError), + #[error("Invalid FFI static type 'void'")] + InvalidTypeVoid, + #[error("Invalid FFI static type 'struct'")] + InvalidTypeStruct, + #[error(transparent)] + Resource(deno_core::error::AnyError), +} + #[op2] pub fn op_ffi_get_static<'scope>( scope: &mut v8::HandleScope<'scope>, @@ -18,24 +28,27 @@ pub fn op_ffi_get_static<'scope>( #[string] name: String, #[serde] static_type: NativeType, optional: bool, -) -> Result, AnyError> { - let resource = state.resource_table.get::(rid)?; +) -> Result, StaticError> { + let resource = state + .resource_table + .get::(rid) + .map_err(StaticError::Resource)?; let data_ptr = match resource.get_static(name) { - Ok(data_ptr) => Ok(data_ptr), + Ok(data_ptr) => data_ptr, Err(err) => { if optional { let null: v8::Local = v8::null(scope).into(); return Ok(null); } else { - Err(err) + return Err(StaticError::Dlfcn(err)); } } - }?; + }; Ok(match static_type { NativeType::Void => { - return Err(type_error("Invalid FFI static type 'void'")); + return Err(StaticError::InvalidTypeVoid); } NativeType::Bool => { // SAFETY: ptr is user provided @@ -132,7 +145,7 @@ pub fn op_ffi_get_static<'scope>( external } NativeType::Struct(_) => { - return Err(type_error("Invalid FFI static type 'struct'")); + return Err(StaticError::InvalidTypeStruct); } }) } diff --git a/runtime/errors.rs b/runtime/errors.rs index 5735635957..612a66b6e2 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -17,6 +17,12 @@ use deno_core::serde_json; use deno_core::url; use deno_core::ModuleResolutionError; use deno_cron::CronError; +use deno_ffi::CallError; +use deno_ffi::CallbackError; +use deno_ffi::DlfcnError; +use deno_ffi::IRError; +use deno_ffi::ReprError; +use deno_ffi::StaticError; use deno_tls::TlsError; use deno_webstorage::WebStorageError; use std::env; @@ -159,6 +165,65 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str { } } +fn get_ffi_repr_error_class(e: &ReprError) -> &'static str { + match e { + ReprError::InvalidOffset => "TypeError", + ReprError::InvalidArrayBuffer => "TypeError", + ReprError::DestinationLengthTooShort => "RangeError", + ReprError::InvalidCString => "TypeError", + ReprError::CStringTooLong => "TypeError", + ReprError::InvalidBool => "TypeError", + ReprError::InvalidU8 => "TypeError", + ReprError::InvalidI8 => "TypeError", + ReprError::InvalidU16 => "TypeError", + ReprError::InvalidI16 => "TypeError", + ReprError::InvalidU32 => "TypeError", + ReprError::InvalidI32 => "TypeError", + ReprError::InvalidU64 => "TypeError", + ReprError::InvalidI64 => "TypeError", + ReprError::InvalidF32 => "TypeError", + ReprError::InvalidF64 => "TypeError", + ReprError::InvalidPointer => "TypeError", + ReprError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_ffi_dlfcn_error_class(e: &DlfcnError) -> &'static str { + match e { + DlfcnError::RegisterSymbol { .. 
} => "Error", + DlfcnError::Dlopen(_) => "Error", + DlfcnError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + DlfcnError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_ffi_static_error_class(e: &StaticError) -> &'static str { + match e { + StaticError::Dlfcn(e) => get_ffi_dlfcn_error_class(e), + StaticError::InvalidTypeVoid => "TypeError", + StaticError::InvalidTypeStruct => "TypeError", + StaticError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_ffi_callback_error_class(e: &CallbackError) -> &'static str { + match e { + CallbackError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + CallbackError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + CallbackError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_ffi_call_error_class(e: &CallError) -> &'static str { + match e { + CallError::IR(_) => "TypeError", + CallError::NonblockingCallFailure(_) => "Error", + CallError::InvalidSymbol(_) => "TypeError", + CallError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + CallError::Callback(e) => get_ffi_callback_error_class(e), + } +} + fn get_webstorage_class_name(e: &WebStorageError) -> &'static str { match e { WebStorageError::ContextNotSupported => "DOMExceptionNotSupportedError", @@ -232,6 +297,21 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { .or_else(|| deno_webgpu::error::get_error_class_name(e)) .or_else(|| deno_web::get_error_class_name(e)) .or_else(|| deno_websocket::get_network_error_class_name(e)) + .or_else(|| e.downcast_ref::().map(|_| "TypeError")) + .or_else(|| e.downcast_ref::().map(get_ffi_repr_error_class)) + .or_else(|| { + e.downcast_ref::() + .map(get_ffi_dlfcn_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_ffi_static_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_ffi_callback_error_class) + }) + .or_else(|| e.downcast_ref::().map(get_ffi_call_error_class)) .or_else(|| e.downcast_ref::().map(get_tls_error_class)) .or_else(|| e.downcast_ref::().map(get_cron_error_class)) .or_else(|| e.downcast_ref::().map(get_canvas_error)) From 1a0cb5b5312941521ab021cfe9eaed498f35900b Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 20:48:39 -0400 Subject: [PATCH 20/52] feat(unstable): `--unstable-detect-cjs` for respecting explicit `"type": "commonjs"` (#26149) When using the `--unstable-detect-cjs` flag or adding `"unstable": ["detect-cjs"]` to a deno.json, it will make a JS file CJS if the closest package.json contains `"type": "commonjs"` and the file is not an ESM module (no TLA, no `import.meta`, no `import`/`export`). 
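For illustration, the behavior described above boils down to the following sketch (the file names and contents mirror the test fixtures added further down in this patch; it is explanatory only, not part of the diff). With a `package.json` containing `"type": "commonjs"` next to these files and `"unstable": ["detect-cjs"]` in `deno.json`, running `deno run --allow-read=. main_cjs.js` prints `3`:

```js
// add.js — has no `import`/`export`, no `import.meta`, and no top-level
// await, so with detect-cjs enabled it is loaded as CommonJS.
module.exports.add = function (a, b) {
  return a + b;
};

// main_cjs.js — also detected as CommonJS, so `require` is available.
const { add } = require("./add");
console.log(add(1, 2)); // prints 3
```

A `.js` file in the same folder that does use `import` keeps being loaded as ESM and can still import `add.js`, as the `main_esm.js` fixture below shows.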
--- cli/args/flags.rs | 117 +++++++++++------- cli/args/mod.rs | 31 ++--- cli/cache/mod.rs | 57 ++++++++- cli/cache/parsed_source.rs | 40 ++++++ cli/factory.rs | 13 ++ cli/graph_util.rs | 11 ++ cli/module_loader.rs | 89 ++++++------- cli/node.rs | 29 +++-- cli/resolver.rs | 50 ++++---- cli/schemas/config-file.v1.json | 1 + cli/standalone/binary.rs | 1 + cli/standalone/mod.rs | 5 +- cli/tools/compile.rs | 10 ++ cli/util/text_encoding.rs | 18 +++ cli/worker.rs | 32 ++++- tests/specs/compile/detect_cjs/__test__.jsonc | 24 ++++ tests/specs/compile/detect_cjs/add.js | 3 + tests/specs/compile/detect_cjs/compile.out | 3 + tests/specs/compile/detect_cjs/deno.json | 5 + tests/specs/compile/detect_cjs/main.js | 3 + tests/specs/compile/detect_cjs/output.out | 1 + tests/specs/compile/detect_cjs/package.json | 3 + .../package_json_type/commonjs/__test__.jsonc | 34 +++++ .../run/package_json_type/commonjs/add.js | 3 + .../run/package_json_type/commonjs/deno.jsonc | 5 + .../commonjs/import_import_meta.js | 3 + .../package_json_type/commonjs/import_meta.js | 1 + .../package_json_type/commonjs/main_cjs.js | 2 + .../package_json_type/commonjs/main_esm.js | 3 + .../commonjs/main_esm_import_meta.js | 2 + .../commonjs/main_esm_import_meta.out | 2 + .../package_json_type/commonjs/main_mix.js | 6 + .../package_json_type/commonjs/main_mix.out | 5 + .../commonjs/not_import_meta.js | 8 ++ .../package_json_type/commonjs/package.json | 3 + .../run/package_json_type/commonjs/tla.js | 2 + .../run/package_json_type/none/__test__.jsonc | 18 +++ tests/specs/run/package_json_type/none/add.js | 3 + .../package_json_type/none/commonjs/add.js | 3 + .../none/commonjs/package.json | 3 + .../run/package_json_type/none/deno.jsonc | 5 + .../run/package_json_type/none/main_cjs.js | 2 + .../run/package_json_type/none/main_cjs.out | 4 + .../run/package_json_type/none/main_esm.js | 3 + .../run/package_json_type/none/main_esm.out | 4 + .../run/package_json_type/none/package.json | 2 + .../package_json_type/none/sub_folder_cjs.js | 3 + 47 files changed, 520 insertions(+), 155 deletions(-) create mode 100644 tests/specs/compile/detect_cjs/__test__.jsonc create mode 100644 tests/specs/compile/detect_cjs/add.js create mode 100644 tests/specs/compile/detect_cjs/compile.out create mode 100644 tests/specs/compile/detect_cjs/deno.json create mode 100644 tests/specs/compile/detect_cjs/main.js create mode 100644 tests/specs/compile/detect_cjs/output.out create mode 100644 tests/specs/compile/detect_cjs/package.json create mode 100644 tests/specs/run/package_json_type/commonjs/__test__.jsonc create mode 100644 tests/specs/run/package_json_type/commonjs/add.js create mode 100644 tests/specs/run/package_json_type/commonjs/deno.jsonc create mode 100644 tests/specs/run/package_json_type/commonjs/import_import_meta.js create mode 100644 tests/specs/run/package_json_type/commonjs/import_meta.js create mode 100644 tests/specs/run/package_json_type/commonjs/main_cjs.js create mode 100644 tests/specs/run/package_json_type/commonjs/main_esm.js create mode 100644 tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js create mode 100644 tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out create mode 100644 tests/specs/run/package_json_type/commonjs/main_mix.js create mode 100644 tests/specs/run/package_json_type/commonjs/main_mix.out create mode 100644 tests/specs/run/package_json_type/commonjs/not_import_meta.js create mode 100644 tests/specs/run/package_json_type/commonjs/package.json create mode 100644 
tests/specs/run/package_json_type/commonjs/tla.js create mode 100644 tests/specs/run/package_json_type/none/__test__.jsonc create mode 100644 tests/specs/run/package_json_type/none/add.js create mode 100644 tests/specs/run/package_json_type/none/commonjs/add.js create mode 100644 tests/specs/run/package_json_type/none/commonjs/package.json create mode 100644 tests/specs/run/package_json_type/none/deno.jsonc create mode 100644 tests/specs/run/package_json_type/none/main_cjs.js create mode 100644 tests/specs/run/package_json_type/none/main_cjs.out create mode 100644 tests/specs/run/package_json_type/none/main_esm.js create mode 100644 tests/specs/run/package_json_type/none/main_esm.out create mode 100644 tests/specs/run/package_json_type/none/package.json create mode 100644 tests/specs/run/package_json_type/none/sub_folder_cjs.js diff --git a/cli/args/flags.rs b/cli/args/flags.rs index d59e5ac1ac..acaf74a67e 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -575,7 +575,8 @@ fn parse_packages_allowed_scripts(s: &str) -> Result { pub struct UnstableConfig { // TODO(bartlomieju): remove in Deno 2.5 pub legacy_flag_enabled: bool, // --unstable - pub bare_node_builtins: bool, // --unstable-bare-node-builts + pub bare_node_builtins: bool, + pub detect_cjs: bool, pub sloppy_imports: bool, pub features: Vec, // --unstabe-kv --unstable-cron } @@ -1528,7 +1529,7 @@ pub fn clap_root() -> Command { ); run_args(Command::new("deno"), true) - .args(unstable_args(UnstableArgsConfig::ResolutionAndRuntime)) + .with_unstable_args(UnstableArgsConfig::ResolutionAndRuntime) .next_line_help(false) .bin_name("deno") .styles( @@ -1630,7 +1631,7 @@ fn command( ) -> Command { Command::new(name) .about(about) - .args(unstable_args(unstable_args_config)) + .with_unstable_args(unstable_args_config) } fn help_subcommand(app: &Command) -> Command { @@ -4142,23 +4143,29 @@ enum UnstableArgsConfig { ResolutionAndRuntime, } -struct UnstableArgsIter { - idx: usize, - cfg: UnstableArgsConfig, +trait CommandExt { + fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self; } -impl Iterator for UnstableArgsIter { - type Item = Arg; +impl CommandExt for Command { + fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self { + let mut next_display_order = { + let mut value = 1000; + move || { + value += 1; + value + } + }; - fn next(&mut self) -> Option { - let arg = if self.idx == 0 { + let mut cmd = self.arg( Arg::new("unstable") - .long("unstable") - .help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features + .long("unstable") + .help(cstr!("Enable all unstable features and APIs. 
Instead of using this flag, consider enabling individual unstable features To view the list of individual unstable feature flags, run this command again with --help=unstable")) - .action(ArgAction::SetTrue) - .hide(matches!(self.cfg, UnstableArgsConfig::None)) - } else if self.idx == 1 { + .action(ArgAction::SetTrue) + .hide(matches!(cfg, UnstableArgsConfig::None)) + .display_order(next_display_order()) + ).arg( Arg::new("unstable-bare-node-builtins") .long("unstable-bare-node-builtins") .help("Enable unstable bare node builtins feature") @@ -4166,20 +4173,36 @@ impl Iterator for UnstableArgsIter { .value_parser(FalseyValueParser::new()) .action(ArgAction::SetTrue) .hide(true) - .long_help(match self.cfg { + .long_help(match cfg { UnstableArgsConfig::None => None, UnstableArgsConfig::ResolutionOnly | UnstableArgsConfig::ResolutionAndRuntime => Some("true"), }) .help_heading(UNSTABLE_HEADING) - } else if self.idx == 2 { + .display_order(next_display_order()), + ).arg( + Arg::new("unstable-detect-cjs") + .long("unstable-detect-cjs") + .help("Reads the package.json type field in a project to treat .js files as .cjs") + .value_parser(FalseyValueParser::new()) + .action(ArgAction::SetTrue) + .hide(true) + .long_help(match cfg { + UnstableArgsConfig::None => None, + UnstableArgsConfig::ResolutionOnly + | UnstableArgsConfig::ResolutionAndRuntime => Some("true"), + }) + .help_heading(UNSTABLE_HEADING) + .display_order(next_display_order()) + ).arg( Arg::new("unstable-byonm") .long("unstable-byonm") .value_parser(FalseyValueParser::new()) .action(ArgAction::SetTrue) .hide(true) .help_heading(UNSTABLE_HEADING) - } else if self.idx == 3 { + .display_order(next_display_order()), + ).arg( Arg::new("unstable-sloppy-imports") .long("unstable-sloppy-imports") .help("Enable unstable resolving of specifiers by extension probing, .js to .ts, and directory probing") @@ -4187,40 +4210,39 @@ impl Iterator for UnstableArgsIter { .value_parser(FalseyValueParser::new()) .action(ArgAction::SetTrue) .hide(true) - .long_help(match self.cfg { + .long_help(match cfg { UnstableArgsConfig::None => None, UnstableArgsConfig::ResolutionOnly | UnstableArgsConfig::ResolutionAndRuntime => Some("true") }) .help_heading(UNSTABLE_HEADING) - } else if self.idx > 3 { - let granular_flag = crate::UNSTABLE_GRANULAR_FLAGS.get(self.idx - 4)?; - Arg::new(format!("unstable-{}", granular_flag.name)) - .long(format!("unstable-{}", granular_flag.name)) - .help(granular_flag.help_text) - .action(ArgAction::SetTrue) - .hide(true) - .help_heading(UNSTABLE_HEADING) - // we don't render long help, so using it here as a sort of metadata - .long_help(if granular_flag.show_in_help { - match self.cfg { - UnstableArgsConfig::None | UnstableArgsConfig::ResolutionOnly => { - None - } - UnstableArgsConfig::ResolutionAndRuntime => Some("true"), - } - } else { - None - }) - } else { - return None; - }; - self.idx += 1; - Some(arg.display_order(self.idx + 1000)) - } -} + .display_order(next_display_order()) + ); -fn unstable_args(cfg: UnstableArgsConfig) -> impl IntoIterator { - UnstableArgsIter { idx: 0, cfg } + for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() { + cmd = cmd.arg( + Arg::new(format!("unstable-{}", granular_flag.name)) + .long(format!("unstable-{}", granular_flag.name)) + .help(granular_flag.help_text) + .action(ArgAction::SetTrue) + .hide(true) + .help_heading(UNSTABLE_HEADING) + // we don't render long help, so using it here as a sort of metadata + .long_help(if granular_flag.show_in_help { + match cfg { + 
UnstableArgsConfig::None | UnstableArgsConfig::ResolutionOnly => { + None + } + UnstableArgsConfig::ResolutionAndRuntime => Some("true"), + } + } else { + None + }) + .display_order(next_display_order()), + ); + } + + cmd + } } fn allow_scripts_arg_parse( @@ -5678,6 +5700,7 @@ fn unstable_args_parse( flags.unstable_config.bare_node_builtins = matches.get_flag("unstable-bare-node-builtins"); + flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs"); flags.unstable_config.sloppy_imports = matches.get_flag("unstable-sloppy-imports"); diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 07906a86ac..f905e186ba 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -1576,6 +1576,11 @@ impl CliOptions { || self.workspace().has_unstable("bare-node-builtins") } + pub fn unstable_detect_cjs(&self) -> bool { + self.flags.unstable_config.detect_cjs + || self.workspace().has_unstable("detect-cjs") + } + fn byonm_enabled(&self) -> bool { // check if enabled via unstable self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual) @@ -1620,21 +1625,17 @@ impl CliOptions { }); if !from_config_file.is_empty() { - // collect unstable granular flags - let mut all_valid_unstable_flags: Vec<&str> = - crate::UNSTABLE_GRANULAR_FLAGS - .iter() - .map(|granular_flag| granular_flag.name) - .collect(); - - let mut another_unstable_flags = Vec::from([ - "sloppy-imports", - "byonm", - "bare-node-builtins", - "fmt-component", - ]); - // add more unstable flags to the same vector holding granular flags - all_valid_unstable_flags.append(&mut another_unstable_flags); + let all_valid_unstable_flags: Vec<&str> = crate::UNSTABLE_GRANULAR_FLAGS + .iter() + .map(|granular_flag| granular_flag.name) + .chain([ + "sloppy-imports", + "byonm", + "bare-node-builtins", + "fmt-component", + "detect-cjs", + ]) + .collect(); // check and warn if the unstable flag of config file isn't supported, by // iterating through the vector holding the unstable flags diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index 628502c506..ded163b4e4 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -9,10 +9,13 @@ use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileOrRedirect; use crate::npm::CliNpmResolver; +use crate::resolver::CliNodeResolver; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::AtomicWriteFileFsAdapter; use crate::util::path::specifier_has_extension; +use crate::util::text_encoding::arc_str_to_bytes; +use crate::util::text_encoding::from_utf8_lossy_owned; use deno_ast::MediaType; use deno_core::futures; @@ -57,6 +60,7 @@ pub use fast_check::FastCheckCache; pub use incremental::IncrementalCache; pub use module_info::ModuleInfoCache; pub use node::NodeAnalysisCache; +pub use parsed_source::EsmOrCjsChecker; pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::ParsedSourceCache; @@ -177,37 +181,46 @@ pub struct FetchCacherOptions { pub permissions: PermissionsContainer, /// If we're publishing for `deno publish`. pub is_deno_publish: bool, + pub unstable_detect_cjs: bool, } /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// a concise interface to the DENO_DIR when building module graphs. 
pub struct FetchCacher { - file_fetcher: Arc, pub file_header_overrides: HashMap>, + esm_or_cjs_checker: Arc, + file_fetcher: Arc, global_http_cache: Arc, + node_resolver: Arc, npm_resolver: Arc, module_info_cache: Arc, permissions: PermissionsContainer, - cache_info_enabled: bool, is_deno_publish: bool, + unstable_detect_cjs: bool, + cache_info_enabled: bool, } impl FetchCacher { pub fn new( + esm_or_cjs_checker: Arc, file_fetcher: Arc, global_http_cache: Arc, + node_resolver: Arc, npm_resolver: Arc, module_info_cache: Arc, options: FetchCacherOptions, ) -> Self { Self { file_fetcher, + esm_or_cjs_checker, global_http_cache, + node_resolver, npm_resolver, module_info_cache, file_header_overrides: options.file_header_overrides, permissions: options.permissions, is_deno_publish: options.is_deno_publish, + unstable_detect_cjs: options.unstable_detect_cjs, cache_info_enabled: false, } } @@ -282,6 +295,46 @@ impl Loader for FetchCacher { }, )))); } + + if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") { + if let Ok(Some(pkg_json)) = + self.node_resolver.get_closest_package_json(specifier) + { + if pkg_json.typ == "commonjs" { + if let Ok(path) = specifier.to_file_path() { + if let Ok(bytes) = std::fs::read(&path) { + let text: Arc = from_utf8_lossy_owned(bytes).into(); + let is_es_module = match self.esm_or_cjs_checker.is_esm( + specifier, + text.clone(), + MediaType::JavaScript, + ) { + Ok(value) => value, + Err(err) => { + return Box::pin(futures::future::ready(Err(err.into()))); + } + }; + if !is_es_module { + self.node_resolver.mark_cjs_resolution(specifier.clone()); + return Box::pin(futures::future::ready(Ok(Some( + LoadResponse::External { + specifier: specifier.clone(), + }, + )))); + } else { + return Box::pin(futures::future::ready(Ok(Some( + LoadResponse::Module { + specifier: specifier.clone(), + content: arc_str_to_bytes(text), + maybe_headers: None, + }, + )))); + } + } + } + } + } + } } if self.is_deno_publish diff --git a/cli/cache/parsed_source.rs b/cli/cache/parsed_source.rs index e956361f46..df6e45c35e 100644 --- a/cli/cache/parsed_source.rs +++ b/cli/cache/parsed_source.rs @@ -5,6 +5,7 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; +use deno_ast::ParseDiagnostic; use deno_ast::ParsedSource; use deno_core::parking_lot::Mutex; use deno_graph::CapturingModuleParser; @@ -149,3 +150,42 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache { } } } + +pub struct EsmOrCjsChecker { + parsed_source_cache: Arc, +} + +impl EsmOrCjsChecker { + pub fn new(parsed_source_cache: Arc) -> Self { + Self { + parsed_source_cache, + } + } + + pub fn is_esm( + &self, + specifier: &ModuleSpecifier, + source: Arc, + media_type: MediaType, + ) -> Result { + // todo(dsherret): add a file cache here to avoid parsing with swc on each run + let source = match self.parsed_source_cache.get_parsed_source(specifier) { + Some(source) => source.clone(), + None => { + let source = deno_ast::parse_program(deno_ast::ParseParams { + specifier: specifier.clone(), + text: source, + media_type, + capture_tokens: true, // capture because it's used for cjs export analysis + scope_analysis: false, + maybe_syntax: None, + })?; + self + .parsed_source_cache + .set_parsed_source(specifier.clone(), source.clone()); + source + } + }; + Ok(source.is_module()) + } +} diff --git a/cli/factory.rs b/cli/factory.rs index b96a133e98..25f3551102 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -14,6 +14,7 @@ use crate::cache::CodeCache; use crate::cache::DenoDir; use 
crate::cache::DenoDirProvider; use crate::cache::EmitCache; +use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; use crate::cache::LocalHttpCache; @@ -171,6 +172,7 @@ struct CliFactoryServices { http_client_provider: Deferred>, emit_cache: Deferred>, emitter: Deferred>, + esm_or_cjs_checker: Deferred>, fs: Deferred>, main_graph_container: Deferred>, maybe_inspector_server: Deferred>>, @@ -298,6 +300,12 @@ impl CliFactory { .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) } + pub fn esm_or_cjs_checker(&self) -> &Arc { + self.services.esm_or_cjs_checker.get_or_init(|| { + Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone())) + }) + } + pub fn global_http_cache(&self) -> Result<&Arc, AnyError> { self.services.global_http_cache.get_or_try_init(|| { Ok(Arc::new(GlobalHttpCache::new( @@ -579,6 +587,7 @@ impl CliFactory { node_analysis_cache, self.fs().clone(), node_resolver, + Some(self.parsed_source_cache().clone()), ); Ok(Arc::new(NodeCodeTranslator::new( @@ -619,8 +628,10 @@ impl CliFactory { Ok(Arc::new(ModuleGraphBuilder::new( cli_options.clone(), self.caches()?.clone(), + self.esm_or_cjs_checker().clone(), self.fs().clone(), self.resolver().await?.clone(), + self.cli_node_resolver().await?.clone(), self.npm_resolver().await?.clone(), self.module_info_cache()?.clone(), self.parsed_source_cache().clone(), @@ -792,6 +803,7 @@ impl CliFactory { Ok(CliMainWorkerFactory::new( self.blob_store().clone(), + self.cjs_resolutions().clone(), if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { @@ -896,6 +908,7 @@ impl CliFactory { node_ipc: cli_options.node_ipc_fd(), serve_port: cli_options.serve_port(), serve_host: cli_options.serve_host(), + unstable_detect_cjs: cli_options.unstable_detect_cjs(), }) } } diff --git a/cli/graph_util.rs b/cli/graph_util.rs index e2f6246e74..e67ae7821b 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -6,6 +6,7 @@ use crate::args::CliLockfile; use crate::args::CliOptions; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::cache; +use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; @@ -14,6 +15,7 @@ use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; use crate::npm::CliNpmResolver; use crate::resolver::CliGraphResolver; +use crate::resolver::CliNodeResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; @@ -379,8 +381,10 @@ pub struct BuildFastCheckGraphOptions<'a> { pub struct ModuleGraphBuilder { options: Arc, caches: Arc, + esm_or_cjs_checker: Arc, fs: Arc, resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, module_info_cache: Arc, parsed_source_cache: Arc, @@ -396,8 +400,10 @@ impl ModuleGraphBuilder { pub fn new( options: Arc, caches: Arc, + esm_or_cjs_checker: Arc, fs: Arc, resolver: Arc, + node_resolver: Arc, npm_resolver: Arc, module_info_cache: Arc, parsed_source_cache: Arc, @@ -410,8 +416,10 @@ impl ModuleGraphBuilder { Self { options, caches, + esm_or_cjs_checker, fs, resolver, + node_resolver, npm_resolver, module_info_cache, parsed_source_cache, @@ -691,8 +699,10 @@ impl ModuleGraphBuilder { permissions: PermissionsContainer, ) -> cache::FetchCacher { cache::FetchCacher::new( + self.esm_or_cjs_checker.clone(), self.file_fetcher.clone(), self.global_http_cache.clone(), + self.node_resolver.clone(), self.npm_resolver.clone(), 
self.module_info_cache.clone(), cache::FetchCacherOptions { @@ -702,6 +712,7 @@ impl ModuleGraphBuilder { self.options.sub_command(), crate::args::DenoSubcommand::Publish { .. } ), + unstable_detect_cjs: self.options.unstable_detect_cjs(), }, ) } diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 293f41ddac..37d42f78e5 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -331,15 +331,23 @@ impl maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, ) -> Result { - let code_source = if let Some(result) = self - .shared - .npm_module_loader - .load_if_in_npm_package(specifier, maybe_referrer) - .await - { - result? - } else { - self.load_prepared_module(specifier, maybe_referrer).await? + let code_source = match self.load_prepared_module(specifier).await? { + Some(code_source) => code_source, + None => { + if self.shared.npm_module_loader.if_in_npm_package(specifier) { + self + .shared + .npm_module_loader + .load(specifier, maybe_referrer) + .await? + } else { + let mut msg = format!("Loading unprepared module: {specifier}"); + if let Some(referrer) = maybe_referrer { + msg = format!("{}, imported from: {}", msg, referrer.as_str()); + } + return Err(anyhow!(msg)); + } + } }; let code = if self.shared.is_inspecting { // we need the code with the source map in order for @@ -514,17 +522,12 @@ impl async fn load_prepared_module( &self, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { + ) -> Result, AnyError> { // Note: keep this in sync with the sync version below let graph = self.graph_container.graph(); - match self.load_prepared_module_or_defer_emit( - &graph, - specifier, - maybe_referrer, - ) { - Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source), - Ok(CodeOrDeferredEmit::DeferredEmit { + match self.load_prepared_module_or_defer_emit(&graph, specifier)? { + Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)), + Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type, source, @@ -537,30 +540,25 @@ impl // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(ModuleCodeStringSource { + Ok(Some(ModuleCodeStringSource { code: ModuleSourceCode::Bytes(transpile_result), found_url: specifier.clone(), media_type, - }) + })) } - Err(err) => Err(err), + None => Ok(None), } } fn load_prepared_module_sync( &self, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { + ) -> Result, AnyError> { // Note: keep this in sync with the async version above let graph = self.graph_container.graph(); - match self.load_prepared_module_or_defer_emit( - &graph, - specifier, - maybe_referrer, - ) { - Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source), - Ok(CodeOrDeferredEmit::DeferredEmit { + match self.load_prepared_module_or_defer_emit(&graph, specifier)? 
{ + Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)), + Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type, source, @@ -572,13 +570,13 @@ impl // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(ModuleCodeStringSource { + Ok(Some(ModuleCodeStringSource { code: ModuleSourceCode::Bytes(transpile_result), found_url: specifier.clone(), media_type, - }) + })) } - Err(err) => Err(err), + None => Ok(None), } } @@ -586,8 +584,7 @@ impl &self, graph: &'graph ModuleGraph, specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result, AnyError> { + ) -> Result>, AnyError> { if specifier.scheme() == "node" { // Node built-in modules should be handled internally. unreachable!("Deno bug. {} was misconfigured internally.", specifier); @@ -599,11 +596,11 @@ impl media_type, specifier, .. - })) => Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { + })) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource { code: ModuleSourceCode::String(source.clone().into()), found_url: specifier.clone(), media_type: *media_type, - })), + }))), Some(deno_graph::Module::Js(JsModule { source, media_type, @@ -624,11 +621,11 @@ impl | MediaType::Cts | MediaType::Jsx | MediaType::Tsx => { - return Ok(CodeOrDeferredEmit::DeferredEmit { + return Ok(Some(CodeOrDeferredEmit::DeferredEmit { specifier, media_type: *media_type, source, - }); + })); } MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") @@ -638,24 +635,18 @@ impl // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); - Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource { + Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource { code: ModuleSourceCode::String(code), found_url: specifier.clone(), media_type: *media_type, - })) + }))) } Some( deno_graph::Module::External(_) | deno_graph::Module::Node(_) | deno_graph::Module::Npm(_), ) - | None => { - let mut msg = format!("Loading unprepared module: {specifier}"); - if let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - Err(anyhow!(msg)) - } + | None => Ok(None), } } } @@ -828,7 +819,7 @@ impl ModuleLoader "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self.0.load_prepared_module_sync(&specifier, None).ok()?; + let source = self.0.load_prepared_module_sync(&specifier).ok()??; source_map_from_code(source.code.as_bytes()) } diff --git a/cli/node.rs b/cli/node.rs index a3cee8ddee..733d5f8717 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -5,6 +5,7 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; +use deno_graph::ParsedSourceStore; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; @@ -16,6 +17,7 @@ use serde::Serialize; use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; +use crate::cache::ParsedSourceCache; use crate::resolver::CliNodeResolver; use crate::util::fs::canonicalize_path_maybe_not_exists; @@ -56,6 +58,7 @@ pub struct CliCjsCodeAnalyzer { cache: NodeAnalysisCache, fs: deno_fs::FileSystemRc, node_resolver: Arc, + parsed_source_cache: Option>, } impl CliCjsCodeAnalyzer { @@ -63,11 +66,13 @@ impl CliCjsCodeAnalyzer { cache: NodeAnalysisCache, fs: deno_fs::FileSystemRc, 
node_resolver: Arc, + parsed_source_cache: Option>, ) -> Self { Self { cache, fs, node_resolver, + parsed_source_cache, } } @@ -107,18 +112,26 @@ impl CliCjsCodeAnalyzer { } } + let maybe_parsed_source = self + .parsed_source_cache + .as_ref() + .and_then(|c| c.remove_parsed_source(specifier)); + let analysis = deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source: Arc = source.into(); move || -> Result<_, deno_ast::ParseDiagnostic> { - let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { - specifier, - text: source, - media_type, - capture_tokens: true, - scope_analysis: false, - maybe_syntax: None, - })?; + let parsed_source = + maybe_parsed_source.map(Ok).unwrap_or_else(|| { + deno_ast::parse_program(deno_ast::ParseParams { + specifier, + text: source, + media_type, + capture_tokens: true, + scope_analysis: false, + maybe_syntax: None, + }) + })?; if parsed_source.is_script() { let analysis = parsed_source.analyze_cjs(); Ok(CliCjsAnalysis::Cjs { diff --git a/cli/resolver.rs b/cli/resolver.rs index 7804261b8b..84c671268a 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -43,7 +43,6 @@ use node_resolver::NodeModuleKind; use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; use node_resolver::PackageJson; -use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -53,7 +52,9 @@ use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; +use crate::util::path::specifier_has_extension; use crate::util::sync::AtomicFlag; +use crate::util::text_encoding::from_utf8_lossy_owned; pub struct ModuleCodeStringSource { pub code: ModuleSourceCode, @@ -215,7 +216,7 @@ impl CliNodeResolver { referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result { - let referrer_kind = if self.cjs_resolutions.contains(referrer) { + let referrer_kind = if self.cjs_resolutions.is_known_cjs(referrer) { NodeModuleKind::Cjs } else { NodeModuleKind::Esm @@ -310,9 +311,7 @@ impl CliNodeResolver { if self.in_npm_package(&specifier) { let resolution = self.node_resolver.url_to_node_resolution(specifier)?; - if let NodeResolution::CommonJs(specifier) = &resolution { - self.cjs_resolutions.insert(specifier.clone()); - } + let resolution = self.handle_node_resolution(resolution); return Ok(Some(resolution.into_url())); } } @@ -333,12 +332,17 @@ impl CliNodeResolver { ) -> NodeResolution { if let NodeResolution::CommonJs(specifier) = &resolution { // remember that this was a common js resolution - self.cjs_resolutions.insert(specifier.clone()); + self.mark_cjs_resolution(specifier.clone()); } resolution } + + pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) { + self.cjs_resolutions.insert(specifier); + } } +// todo(dsherret): move to module_loader.rs #[derive(Clone)] pub struct NpmModuleLoader { cjs_resolutions: Arc, @@ -362,18 +366,9 @@ impl NpmModuleLoader { } } - pub async fn load_if_in_npm_package( - &self, - specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Option> { - if self.node_resolver.in_npm_package(specifier) - || (specifier.scheme() == "file" && specifier.path().ends_with(".cjs")) - { - Some(self.load(specifier, maybe_referrer).await) - } else { - None - } + pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { + self.node_resolver.in_npm_package(specifier) + || self.cjs_resolutions.is_known_cjs(specifier) } pub async fn load( @@ -418,16 +413,9 @@ 
impl NpmModuleLoader { } })?; - let code = if self.cjs_resolutions.contains(specifier) - || (specifier.scheme() == "file" && specifier.path().ends_with(".cjs")) - { + let code = if self.cjs_resolutions.is_known_cjs(specifier) { // translate cjs to esm if it's cjs and inject node globals - let code = match String::from_utf8_lossy(&code) { - Cow::Owned(code) => code, - // SAFETY: `String::from_utf8_lossy` guarantees that the result is valid - // UTF-8 if `Cow::Borrowed` is returned. - Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(code) }, - }; + let code = from_utf8_lossy_owned(code); ModuleSourceCode::String( self .node_code_translator @@ -452,8 +440,12 @@ impl NpmModuleLoader { pub struct CjsResolutionStore(DashSet); impl CjsResolutionStore { - pub fn contains(&self, specifier: &ModuleSpecifier) -> bool { - self.0.contains(specifier) + pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool { + if specifier.scheme() != "file" { + return false; + } + + specifier_has_extension(specifier, "cjs") || self.0.contains(specifier) } pub fn insert(&self, specifier: ModuleSpecifier) { diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index af18e6f21c..4a6239f0d4 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -528,6 +528,7 @@ "bare-node-builtins", "byonm", "cron", + "detect-cjs", "ffi", "fs", "http", diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 1290a238f3..6e747bed42 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -622,6 +622,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { unstable_config: UnstableConfig { legacy_flag_enabled: false, bare_node_builtins: cli_options.unstable_bare_node_builtins(), + detect_cjs: cli_options.unstable_detect_cjs(), sloppy_imports: cli_options.unstable_sloppy_imports(), features: cli_options.unstable_features(), }, diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 258de0dad3..60018228b7 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -586,6 +586,7 @@ pub async fn run( node_analysis_cache, fs.clone(), cli_node_resolver.clone(), + None, ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, @@ -651,7 +652,7 @@ pub async fn run( workspace_resolver, node_resolver: cli_node_resolver.clone(), npm_module_loader: Arc::new(NpmModuleLoader::new( - cjs_resolutions, + cjs_resolutions.clone(), node_code_translator, fs.clone(), cli_node_resolver, @@ -696,6 +697,7 @@ pub async fn run( }); let worker_factory = CliMainWorkerFactory::new( Arc::new(BlobStore::default()), + cjs_resolutions, // Code cache is not supported for standalone binary yet. None, feature_checker, @@ -738,6 +740,7 @@ pub async fn run( node_ipc: None, serve_port: None, serve_host: None, + unstable_detect_cjs: metadata.unstable_config.detect_cjs, }, ); diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index c1f98bc08a..3cc4414fcb 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -54,6 +54,16 @@ pub async fn compile( ); } + if cli_options.unstable_detect_cjs() { + log::warn!( + concat!( + "{} --unstable-detect-cjs is not properly supported in deno compile. 
", + "The compiled executable may encounter runtime errors.", + ), + crate::colors::yellow("Warning"), + ); + } + let output_path = resolve_compile_executable_output_path( http_client, &compile_flags, diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index d2e0832c95..0b7601cb9c 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -1,6 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; use std::ops::Range; +use std::sync::Arc; use base64::prelude::BASE64_STANDARD; use base64::Engine; @@ -9,6 +11,15 @@ use deno_core::ModuleSourceCode; static SOURCE_MAP_PREFIX: &[u8] = b"//# sourceMappingURL=data:application/json;base64,"; +pub fn from_utf8_lossy_owned(bytes: Vec) -> String { + match String::from_utf8_lossy(&bytes) { + Cow::Owned(code) => code, + // SAFETY: `String::from_utf8_lossy` guarantees that the result is valid + // UTF-8 if `Cow::Borrowed` is returned. + Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) }, + } +} + pub fn source_map_from_code(code: &[u8]) -> Option> { let range = find_source_map_range(code)?; let source_map_range = &code[range]; @@ -85,6 +96,13 @@ fn find_source_map_range(code: &[u8]) -> Option> { } } +/// Converts an `Arc` to an `Arc<[u8]>`. +pub fn arc_str_to_bytes(arc_str: Arc) -> Arc<[u8]> { + let raw = Arc::into_raw(arc_str); + // SAFETY: This is safe because they have the same memory layout. + unsafe { Arc::from_raw(raw as *const [u8]) } +} + #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/cli/worker.rs b/cli/worker.rs index cc18c0d154..489b2dd931 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -51,9 +51,11 @@ use crate::args::DenoSubcommand; use crate::args::StorageKeyResolver; use crate::errors; use crate::npm::CliNpmResolver; +use crate::resolver::CjsResolutionStore; use crate::util::checksum; use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; +use crate::util::path::specifier_has_extension; use crate::version; pub struct ModuleLoaderAndSourceMapGetter { @@ -120,11 +122,13 @@ pub struct CliMainWorkerOptions { pub node_ipc: Option, pub serve_port: Option, pub serve_host: Option, + pub unstable_detect_cjs: bool, } struct SharedWorkerState { blob_store: Arc, broadcast_channel: InMemoryBroadcastChannel, + cjs_resolution_store: Arc, code_cache: Option>, compiled_wasm_module_store: CompiledWasmModuleStore, feature_checker: Arc, @@ -422,6 +426,7 @@ impl CliMainWorkerFactory { #[allow(clippy::too_many_arguments)] pub fn new( blob_store: Arc, + cjs_resolution_store: Arc, code_cache: Option>, feature_checker: Arc, fs: Arc, @@ -441,6 +446,7 @@ impl CliMainWorkerFactory { shared: Arc::new(SharedWorkerState { blob_store, broadcast_channel: Default::default(), + cjs_resolution_store, code_cache, compiled_wasm_module_store: Default::default(), feature_checker, @@ -486,6 +492,9 @@ impl CliMainWorkerFactory { stdio: deno_runtime::deno_io::Stdio, ) -> Result { let shared = &self.shared; + let ModuleLoaderAndSourceMapGetter { module_loader } = shared + .module_loader_factory + .create_for_main(permissions.clone()); let (main_module, is_main_cjs) = if let Ok(package_ref) = NpmPackageReqReference::from_specifier(&main_module) { @@ -526,13 +535,28 @@ impl CliMainWorkerFactory { let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); (node_resolution.into_url(), is_main_cjs) } else { - let is_cjs = main_module.path().ends_with(".cjs"); + let is_maybe_cjs_js_ext = 
self.shared.options.unstable_detect_cjs + && specifier_has_extension(&main_module, "js") + && self + .shared + .node_resolver + .get_closest_package_json(&main_module) + .ok() + .flatten() + .map(|pkg_json| pkg_json.typ == "commonjs") + .unwrap_or(false); + let is_cjs = if is_maybe_cjs_js_ext { + // fill the cjs resolution store by preparing the module load + module_loader + .prepare_load(&main_module, None, false) + .await?; + self.shared.cjs_resolution_store.is_known_cjs(&main_module) + } else { + specifier_has_extension(&main_module, "cjs") + }; (main_module, is_cjs) }; - let ModuleLoaderAndSourceMapGetter { module_loader } = shared - .module_loader_factory - .create_for_main(permissions.clone()); let maybe_inspector_server = shared.maybe_inspector_server.clone(); let create_web_worker_cb = diff --git a/tests/specs/compile/detect_cjs/__test__.jsonc b/tests/specs/compile/detect_cjs/__test__.jsonc new file mode 100644 index 0000000000..32bebb7a57 --- /dev/null +++ b/tests/specs/compile/detect_cjs/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile --allow-read --output main main.js", + "output": "compile.out" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 1 + }, { + "if": "windows", + "args": "compile --allow-read --output main.exe main.js", + "output": "compile.out" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 1 + }] +} diff --git a/tests/specs/compile/detect_cjs/add.js b/tests/specs/compile/detect_cjs/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/compile/detect_cjs/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/compile/detect_cjs/compile.out b/tests/specs/compile/detect_cjs/compile.out new file mode 100644 index 0000000000..6509b7f29c --- /dev/null +++ b/tests/specs/compile/detect_cjs/compile.out @@ -0,0 +1,3 @@ +Warning --unstable-detect-cjs is not properly supported in deno compile. The compiled executable may encounter runtime errors. 
+Check file:///[WILDLINE]/main.js +Compile file:///[WILDLINE] diff --git a/tests/specs/compile/detect_cjs/deno.json b/tests/specs/compile/detect_cjs/deno.json new file mode 100644 index 0000000000..35f64c86f4 --- /dev/null +++ b/tests/specs/compile/detect_cjs/deno.json @@ -0,0 +1,5 @@ +{ + "unstable": [ + "detect-cjs" + ] +} diff --git a/tests/specs/compile/detect_cjs/main.js b/tests/specs/compile/detect_cjs/main.js new file mode 100644 index 0000000000..8c55f673b0 --- /dev/null +++ b/tests/specs/compile/detect_cjs/main.js @@ -0,0 +1,3 @@ +import { add } from "./add.js"; + +console.log(add(1, 2)); diff --git a/tests/specs/compile/detect_cjs/output.out b/tests/specs/compile/detect_cjs/output.out new file mode 100644 index 0000000000..b53c443698 --- /dev/null +++ b/tests/specs/compile/detect_cjs/output.out @@ -0,0 +1 @@ +error: Module not found: file:///[WILDLINE]/add.js diff --git a/tests/specs/compile/detect_cjs/package.json b/tests/specs/compile/detect_cjs/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/specs/compile/detect_cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/specs/run/package_json_type/commonjs/__test__.jsonc b/tests/specs/run/package_json_type/commonjs/__test__.jsonc new file mode 100644 index 0000000000..85b7219fa0 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/__test__.jsonc @@ -0,0 +1,34 @@ +{ + "tests": { + "main_cjs": { + "args": "run --allow-read=. main_cjs.js", + "output": "3\n" + }, + "main_esm": { + "args": "run --allow-read=. main_esm.js", + "output": "3\n" + }, + "main_mix": { + "args": "run --allow-read=. main_mix.js", + "output": "main_mix.out", + "exitCode": 1 + }, + "import_import_meta": { + "args": "run import_import_meta.js", + "output": "[WILDLINE]/import_meta.js\n" + }, + "main_import_meta": { + "args": "run main_esm_import_meta.js", + "output": "main_esm_import_meta.out", + "exitCode": 1 + }, + "not_import_meta": { + "args": "run --allow-read=. not_import_meta.js", + "output": "3\n" + }, + "tla": { + "args": "run --allow-read=. 
tla.js", + "output": "loaded\n" + } + } +} diff --git a/tests/specs/run/package_json_type/commonjs/add.js b/tests/specs/run/package_json_type/commonjs/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/run/package_json_type/commonjs/deno.jsonc b/tests/specs/run/package_json_type/commonjs/deno.jsonc new file mode 100644 index 0000000000..35f64c86f4 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/deno.jsonc @@ -0,0 +1,5 @@ +{ + "unstable": [ + "detect-cjs" + ] +} diff --git a/tests/specs/run/package_json_type/commonjs/import_import_meta.js b/tests/specs/run/package_json_type/commonjs/import_import_meta.js new file mode 100644 index 0000000000..f07518985a --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/import_import_meta.js @@ -0,0 +1,3 @@ +import { value } from "./import_meta.js"; + +console.log(value); diff --git a/tests/specs/run/package_json_type/commonjs/import_meta.js b/tests/specs/run/package_json_type/commonjs/import_meta.js new file mode 100644 index 0000000000..2bdbc30fe6 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/import_meta.js @@ -0,0 +1 @@ +export const value = import.meta.url; diff --git a/tests/specs/run/package_json_type/commonjs/main_cjs.js b/tests/specs/run/package_json_type/commonjs/main_cjs.js new file mode 100644 index 0000000000..365e8e06f8 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_cjs.js @@ -0,0 +1,2 @@ +const { add } = require("./add"); +console.log(add(1, 2)); diff --git a/tests/specs/run/package_json_type/commonjs/main_esm.js b/tests/specs/run/package_json_type/commonjs/main_esm.js new file mode 100644 index 0000000000..8c55f673b0 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_esm.js @@ -0,0 +1,3 @@ +import { add } from "./add.js"; + +console.log(add(1, 2)); diff --git a/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js b/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js new file mode 100644 index 0000000000..f1dff20b57 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js @@ -0,0 +1,2 @@ +console.log(import.meta.url); +console.log(require("./add")); diff --git a/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out b/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out new file mode 100644 index 0000000000..e177defffd --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out @@ -0,0 +1,2 @@ +[WILDLINE]main_esm_import_meta.js +error: Uncaught (in promise) ReferenceError: require is not defined[WILDCARD] diff --git a/tests/specs/run/package_json_type/commonjs/main_mix.js b/tests/specs/run/package_json_type/commonjs/main_mix.js new file mode 100644 index 0000000000..2a2c2c62a2 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_mix.js @@ -0,0 +1,6 @@ +import { add } from "./add.js"; + +console.log(add(1, 2)); + +// will error +console.log(require("./add").add(1, 2)); diff --git a/tests/specs/run/package_json_type/commonjs/main_mix.out b/tests/specs/run/package_json_type/commonjs/main_mix.out new file mode 100644 index 0000000000..d6123d48b7 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/main_mix.out @@ -0,0 +1,5 @@ +3 +error: Uncaught (in promise) ReferenceError: require is not defined +console.log(require("./add").add(1, 2)); + ^ + at 
file:///[WILDLINE]main_mix.js:[WILDLINE] diff --git a/tests/specs/run/package_json_type/commonjs/not_import_meta.js b/tests/specs/run/package_json_type/commonjs/not_import_meta.js new file mode 100644 index 0000000000..216b900df6 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/not_import_meta.js @@ -0,0 +1,8 @@ +try { + console.log(test.import.meta.url); +} catch { + // ignore +} + +// should work because this is not an ESM file +console.log(require("./add").add(1, 2)); diff --git a/tests/specs/run/package_json_type/commonjs/package.json b/tests/specs/run/package_json_type/commonjs/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/specs/run/package_json_type/commonjs/tla.js b/tests/specs/run/package_json_type/commonjs/tla.js new file mode 100644 index 0000000000..978578de46 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/tla.js @@ -0,0 +1,2 @@ +await new Promise((r) => r()); +console.log("loaded"); diff --git a/tests/specs/run/package_json_type/none/__test__.jsonc b/tests/specs/run/package_json_type/none/__test__.jsonc new file mode 100644 index 0000000000..fa1d22ca18 --- /dev/null +++ b/tests/specs/run/package_json_type/none/__test__.jsonc @@ -0,0 +1,18 @@ +{ + "tests": { + "main_cjs": { + "args": "run --allow-read=. main_cjs.js", + "output": "main_cjs.out", + "exitCode": 1 + }, + "main_esm": { + "args": "run --allow-read=. main_esm.js", + "output": "main_esm.out", + "exitCode": 1 + }, + "sub_folder_cjs": { + "args": "run --allow-read=. sub_folder_cjs.js", + "output": "3\n" + } + } +} diff --git a/tests/specs/run/package_json_type/none/add.js b/tests/specs/run/package_json_type/none/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/run/package_json_type/none/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/run/package_json_type/none/commonjs/add.js b/tests/specs/run/package_json_type/none/commonjs/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/run/package_json_type/none/commonjs/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/run/package_json_type/none/commonjs/package.json b/tests/specs/run/package_json_type/none/commonjs/package.json new file mode 100644 index 0000000000..5bbefffbab --- /dev/null +++ b/tests/specs/run/package_json_type/none/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/tests/specs/run/package_json_type/none/deno.jsonc b/tests/specs/run/package_json_type/none/deno.jsonc new file mode 100644 index 0000000000..35f64c86f4 --- /dev/null +++ b/tests/specs/run/package_json_type/none/deno.jsonc @@ -0,0 +1,5 @@ +{ + "unstable": [ + "detect-cjs" + ] +} diff --git a/tests/specs/run/package_json_type/none/main_cjs.js b/tests/specs/run/package_json_type/none/main_cjs.js new file mode 100644 index 0000000000..365e8e06f8 --- /dev/null +++ b/tests/specs/run/package_json_type/none/main_cjs.js @@ -0,0 +1,2 @@ +const { add } = require("./add"); +console.log(add(1, 2)); diff --git a/tests/specs/run/package_json_type/none/main_cjs.out b/tests/specs/run/package_json_type/none/main_cjs.out new file mode 100644 index 0000000000..fe9acf0094 --- /dev/null +++ b/tests/specs/run/package_json_type/none/main_cjs.out @@ -0,0 +1,4 @@ +error: Uncaught (in promise) ReferenceError: require is not 
defined +const { add } = require("./add"); + ^ + at file:///[WILDLINE] diff --git a/tests/specs/run/package_json_type/none/main_esm.js b/tests/specs/run/package_json_type/none/main_esm.js new file mode 100644 index 0000000000..8c55f673b0 --- /dev/null +++ b/tests/specs/run/package_json_type/none/main_esm.js @@ -0,0 +1,3 @@ +import { add } from "./add.js"; + +console.log(add(1, 2)); diff --git a/tests/specs/run/package_json_type/none/main_esm.out b/tests/specs/run/package_json_type/none/main_esm.out new file mode 100644 index 0000000000..5f16c5f350 --- /dev/null +++ b/tests/specs/run/package_json_type/none/main_esm.out @@ -0,0 +1,4 @@ +error: Uncaught SyntaxError: The requested module './add.js' does not provide an export named 'add' +import { add } from "./add.js"; + ^ + at (file:///[WILDLINE]) diff --git a/tests/specs/run/package_json_type/none/package.json b/tests/specs/run/package_json_type/none/package.json new file mode 100644 index 0000000000..2c63c08510 --- /dev/null +++ b/tests/specs/run/package_json_type/none/package.json @@ -0,0 +1,2 @@ +{ +} diff --git a/tests/specs/run/package_json_type/none/sub_folder_cjs.js b/tests/specs/run/package_json_type/none/sub_folder_cjs.js new file mode 100644 index 0000000000..ad04e67301 --- /dev/null +++ b/tests/specs/run/package_json_type/none/sub_folder_cjs.js @@ -0,0 +1,3 @@ +import { add } from "./commonjs/add.js"; + +console.log(add(1, 2)); From 9f0a447f7cbcdf1e38cc0d52a5da70a50ea0179b Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Tue, 15 Oct 2024 11:21:47 +0900 Subject: [PATCH 21/52] fix(cli): named export takes precedence over default export in doc testing (#26112) This commit fixes the issue of import name conflict in case the named export and default export have the same name by letting named export take precedence over default export. Fixes #26009 --- cli/util/extract.rs | 62 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/cli/util/extract.rs b/cli/util/extract.rs index 841cf6eb0f..873b7e7f2d 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -254,7 +254,11 @@ impl ExportCollector { let mut import_specifiers = vec![]; if let Some(default_export) = &self.default_export { - if !symbols_to_exclude.contains(default_export) { + // If the default export conflicts with a named export, a named one + // takes precedence. 
+ if !symbols_to_exclude.contains(default_export) + && !self.named_exports.contains(default_export) + { import_specifiers.push(ast::ImportSpecifier::Default( ast::ImportDefaultSpecifier { span: DUMMY_SP, @@ -1137,6 +1141,30 @@ Deno.test("file:///README.md$6-12.js", async ()=>{ media_type: MediaType::JavaScript, }], }, + // https://github.com/denoland/deno/issues/26009 + Test { + input: Input { + source: r#" +/** + * ```ts + * console.log(Foo) + * ``` + */ +export class Foo {} +export default Foo +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { Foo } from "file:///main.ts"; +Deno.test("file:///main.ts$3-6.ts", async ()=>{ + console.log(Foo); +}); +"#, + specifier: "file:///main.ts$3-6.ts", + media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { @@ -1326,6 +1354,28 @@ assertEquals(add(1, 2), 3); media_type: MediaType::JavaScript, }], }, + // https://github.com/denoland/deno/issues/26009 + Test { + input: Input { + source: r#" +/** + * ```ts + * console.log(Foo) + * ``` + */ +export class Foo {} +export default Foo +"#, + specifier: "file:///main.ts", + }, + expected: vec![Expected { + source: r#"import { Foo } from "file:///main.ts"; +console.log(Foo); +"#, + specifier: "file:///main.ts$3-6.ts", + media_type: MediaType::TypeScript, + }], + }, ]; for test in tests { @@ -1581,6 +1631,16 @@ declare global { named_expected: atom_set!(), default_expected: None, }, + // The identifier `Foo` conflicts, but `ExportCollector` doesn't do + // anything about it. It is handled by `to_import_specifiers` method. + Test { + input: r#" +export class Foo {} +export default Foo +"#, + named_expected: atom_set!("Foo"), + default_expected: Some("Foo".into()), + }, ]; for test in tests { From 4c9eee3ebe383f0aa8f082dd6831f609cd5d5abb Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 23:25:18 -0400 Subject: [PATCH 22/52] perf(http): cache webidl.converters lookups in ext/fetch/23_response.js (#26256) --- ext/fetch/23_response.js | 105 +++++++++++++++++++++------------------ 1 file changed, 58 insertions(+), 47 deletions(-) diff --git a/ext/fetch/23_response.js b/ext/fetch/23_response.js index ff4ad5facd..278dcb7dec 100644 --- a/ext/fetch/23_response.js +++ b/ext/fetch/23_response.js @@ -61,6 +61,15 @@ const _mimeType = Symbol("mime type"); const _body = Symbol("body"); const _brand = webidl.brand; +// it's slightly faster to cache these +const webidlConvertersBodyInitDomString = + webidl.converters["BodyInit_DOMString?"]; +const webidlConvertersUSVString = webidl.converters["USVString"]; +const webidlConvertersUnsignedShort = webidl.converters["unsigned short"]; +const webidlConvertersAny = webidl.converters["any"]; +const webidlConvertersByteString = webidl.converters["ByteString"]; +const webidlConvertersHeadersInit = webidl.converters["HeadersInit"]; + /** * @typedef InnerResponse * @property {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} type @@ -259,8 +268,8 @@ class Response { */ static redirect(url, status = 302) { const prefix = "Failed to execute 'Response.redirect'"; - url = webidl.converters["USVString"](url, prefix, "Argument 1"); - status = webidl.converters["unsigned short"](status, prefix, "Argument 2"); + url = webidlConvertersUSVString(url, prefix, "Argument 1"); + status = webidlConvertersUnsignedShort(status, prefix, "Argument 2"); const baseURL = getLocationHref(); const parsedURL = new URL(url, baseURL); @@ -286,8 +295,8 @@ class Response { */ static json(data = undefined, init = { __proto__: 
null }) { const prefix = "Failed to execute 'Response.json'"; - data = webidl.converters.any(data); - init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); + data = webidlConvertersAny(data); + init = webidlConvertersResponseInitFast(init, prefix, "Argument 2"); const str = serializeJSValueToJSONString(data); const res = extractBody(str); @@ -313,8 +322,8 @@ class Response { } const prefix = "Failed to construct 'Response'"; - body = webidl.converters["BodyInit_DOMString?"](body, prefix, "Argument 1"); - init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2"); + body = webidlConvertersBodyInitDomString(body, prefix, "Argument 1"); + init = webidlConvertersResponseInitFast(init, prefix, "Argument 2"); this[_response] = newInnerResponse(); this[_headers] = headersFromHeaderList( @@ -443,47 +452,49 @@ webidl.converters["Response"] = webidl.createInterfaceConverter( "Response", ResponsePrototype, ); -webidl.converters["ResponseInit"] = webidl.createDictionaryConverter( - "ResponseInit", - [{ - key: "status", - defaultValue: 200, - converter: webidl.converters["unsigned short"], - }, { - key: "statusText", - defaultValue: "", - converter: webidl.converters["ByteString"], - }, { - key: "headers", - converter: webidl.converters["HeadersInit"], - }], -); -webidl.converters["ResponseInit_fast"] = function ( - init, - prefix, - context, - opts, -) { - if (init === undefined || init === null) { - return { status: 200, statusText: "", headers: undefined }; - } - // Fast path, if not a proxy - if (typeof init === "object" && !core.isProxy(init)) { - // Not a proxy fast path - const status = init.status !== undefined - ? webidl.converters["unsigned short"](init.status) - : 200; - const statusText = init.statusText !== undefined - ? webidl.converters["ByteString"](init.statusText) - : ""; - const headers = init.headers !== undefined - ? webidl.converters["HeadersInit"](init.headers) - : undefined; - return { status, statusText, headers }; - } - // Slow default path - return webidl.converters["ResponseInit"](init, prefix, context, opts); -}; +const webidlConvertersResponseInit = webidl.converters["ResponseInit"] = webidl + .createDictionaryConverter( + "ResponseInit", + [{ + key: "status", + defaultValue: 200, + converter: webidlConvertersUnsignedShort, + }, { + key: "statusText", + defaultValue: "", + converter: webidlConvertersByteString, + }, { + key: "headers", + converter: webidlConvertersHeadersInit, + }], + ); +const webidlConvertersResponseInitFast = webidl + .converters["ResponseInit_fast"] = function ( + init, + prefix, + context, + opts, + ) { + if (init === undefined || init === null) { + return { status: 200, statusText: "", headers: undefined }; + } + // Fast path, if not a proxy + if (typeof init === "object" && !core.isProxy(init)) { + // Not a proxy fast path + const status = init.status !== undefined + ? webidlConvertersUnsignedShort(init.status) + : 200; + const statusText = init.statusText !== undefined + ? webidlConvertersByteString(init.statusText) + : ""; + const headers = init.headers !== undefined + ? 
webidlConvertersHeadersInit(init.headers) + : undefined; + return { status, statusText, headers }; + } + // Slow default path + return webidlConvertersResponseInit(init, prefix, context, opts); + }; /** * @param {Response} response From 7f3747f2ef7cc9e1d45aa33360afdfe62cd20c56 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 23:25:47 -0400 Subject: [PATCH 23/52] perf(http): avoid clone getting request method and url (#26250) --- ext/http/fly_accept_encoding.rs | 2 +- ext/http/http_next.rs | 2 +- ext/http/request_properties.rs | 27 +++++++++++++-------------- 3 files changed, 15 insertions(+), 16 deletions(-) diff --git a/ext/http/fly_accept_encoding.rs b/ext/http/fly_accept_encoding.rs index 94e3368767..4d6fd2231e 100644 --- a/ext/http/fly_accept_encoding.rs +++ b/ext/http/fly_accept_encoding.rs @@ -119,7 +119,7 @@ fn encodings_iter_inner<'s>( }; Some(Ok((encoding, qval))) }) - .map(|r| r?) // flatten Result = match request_properties.authority { Some(authority) => v8::String::new_from_utf8( scope, - authority.as_ref(), + authority.as_bytes(), v8::NewStringType::Normal, ) .unwrap() diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs index 1422c7417d..39d35a79f1 100644 --- a/ext/http/request_properties.rs +++ b/ext/http/request_properties.rs @@ -34,8 +34,8 @@ pub struct HttpConnectionProperties { pub stream_type: NetworkStreamType, } -pub struct HttpRequestProperties { - pub authority: Option, +pub struct HttpRequestProperties<'a> { + pub authority: Option>, } /// Pluggable trait to determine listen, connection and request properties @@ -84,11 +84,11 @@ pub trait HttpPropertyExtractor { ) -> NetworkStream; /// Determines the request properties. - fn request_properties( - connection_properties: &HttpConnectionProperties, - uri: &Uri, - headers: &HeaderMap, - ) -> HttpRequestProperties; + fn request_properties<'a>( + connection_properties: &'a HttpConnectionProperties, + uri: &'a Uri, + headers: &'a HeaderMap, + ) -> HttpRequestProperties<'a>; } pub struct DefaultHttpPropertyExtractor {} @@ -180,18 +180,17 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { } } - fn request_properties( - connection_properties: &HttpConnectionProperties, - uri: &Uri, - headers: &HeaderMap, - ) -> HttpRequestProperties { + fn request_properties<'a>( + connection_properties: &'a HttpConnectionProperties, + uri: &'a Uri, + headers: &'a HeaderMap, + ) -> HttpRequestProperties<'a> { let authority = req_host( uri, headers, connection_properties.stream_type, connection_properties.local_port.unwrap_or_default(), - ) - .map(|s| s.into_owned()); + ); HttpRequestProperties { authority } } From ae6a2b23bae83795bd973414216a89c839dd8fda Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 14 Oct 2024 23:57:31 -0400 Subject: [PATCH 24/52] fix: do not panic running remote cjs module (#26259) Instead error. 
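For illustration, the new spec test added below drives exactly this path against the local test server; running a remote `.cjs` entrypoint now fails with a readable module-type error instead of a panic, roughly (output taken from the `output.out` expectation in this patch):

```
$ deno run http://localhost:4545/run/add.cjs
Download http://localhost:4545/run/add.cjs
error: Expected a JavaScript or TypeScript module, but identified a Cjs module. Importing these types of modules is currently not supported.
  Specifier: http://localhost:4545/run/add.cjs
```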
--- cli/worker.rs | 3 ++- tests/specs/run/remote_cjs_main/__test__.jsonc | 5 +++++ tests/specs/run/remote_cjs_main/output.out | 3 +++ tests/testdata/run/add.cjs | 3 +++ 4 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 tests/specs/run/remote_cjs_main/__test__.jsonc create mode 100644 tests/specs/run/remote_cjs_main/output.out create mode 100644 tests/testdata/run/add.cjs diff --git a/cli/worker.rs b/cli/worker.rs index 489b2dd931..e230197d2b 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -552,7 +552,8 @@ impl CliMainWorkerFactory { .await?; self.shared.cjs_resolution_store.is_known_cjs(&main_module) } else { - specifier_has_extension(&main_module, "cjs") + main_module.scheme() == "file" + && specifier_has_extension(&main_module, "cjs") }; (main_module, is_cjs) }; diff --git a/tests/specs/run/remote_cjs_main/__test__.jsonc b/tests/specs/run/remote_cjs_main/__test__.jsonc new file mode 100644 index 0000000000..f45d09406e --- /dev/null +++ b/tests/specs/run/remote_cjs_main/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "args": "run http://localhost:4545/run/add.cjs", + "output": "output.out", + "exitCode": 1 +} diff --git a/tests/specs/run/remote_cjs_main/output.out b/tests/specs/run/remote_cjs_main/output.out new file mode 100644 index 0000000000..f75c33907a --- /dev/null +++ b/tests/specs/run/remote_cjs_main/output.out @@ -0,0 +1,3 @@ +Download http://localhost:4545/run/add.cjs +error: Expected a JavaScript or TypeScript module, but identified a Cjs module. Importing these types of modules is currently not supported. + Specifier: http://localhost:4545/run/add.cjs diff --git a/tests/testdata/run/add.cjs b/tests/testdata/run/add.cjs new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/testdata/run/add.cjs @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; From 7bfec3817310c6197ef49694a51d53365ac8d038 Mon Sep 17 00:00:00 2001 From: Yoshiya Hinosawa Date: Tue, 15 Oct 2024 16:44:21 +0900 Subject: [PATCH 25/52] fix(repl): remove check flags (#26140) This change removes the handling of the `--check` and `--no-check` flags from the `deno repl` subcommand. Currently these flags have no effect, and the help output for these options is incorrect and confusing. closes #26042 --- cli/args/flags.rs | 33 +++++++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/cli/args/flags.rs b/cli/args/flags.rs index acaf74a67e..7b0cee5bde 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -2769,8 +2769,13 @@ It is especially useful for quick prototyping and checking snippets of code. TypeScript is supported, however it is not type-checked, only transpiled." ), UnstableArgsConfig::ResolutionAndRuntime) - .defer(|cmd| runtime_args(cmd, true, true, true) - .arg(check_arg(false)) + .defer(|cmd| { + let cmd = compile_args_without_check_args(cmd); + let cmd = inspect_args(cmd); + let cmd = permission_args(cmd, None); + let cmd = runtime_misc_args(cmd); + + cmd .arg( Arg::new("eval-file") .long("eval-file") @@ -2789,7 +2794,7 @@ TypeScript is supported, however it is not type-checked, only transpiled." .after_help(cstr!("Environment variables: DENO_REPL_HISTORY Set REPL history file path. History file is disabled when the value is empty. 
[default: $DENO_DIR/deno_history.txt]")) - ) + }) .arg(env_file_arg()) .arg( Arg::new("args") @@ -3662,6 +3667,10 @@ fn runtime_args( } else { app }; + runtime_misc_args(app) +} + +fn runtime_misc_args(app: Command) -> Command { app .arg(frozen_lockfile_arg()) .arg(cached_only_arg()) @@ -4880,8 +4889,18 @@ fn repl_parse( flags: &mut Flags, matches: &mut ArgMatches, ) -> clap::error::Result<()> { - runtime_args_parse(flags, matches, true, true, true)?; - unsafely_ignore_certificate_errors_parse(flags, matches); + unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); + compile_args_without_check_parse(flags, matches)?; + cached_only_arg_parse(flags, matches); + frozen_lockfile_arg_parse(flags, matches); + permission_args_parse(flags, matches)?; + inspect_arg_parse(flags, matches); + location_arg_parse(flags, matches); + v8_flags_arg_parse(flags, matches); + seed_arg_parse(flags, matches); + enable_testing_features_arg_parse(flags, matches); + env_file_arg_parse(flags, matches); + strace_ops_parse(flags, matches); let eval_files = matches .remove_many::("eval-file") @@ -7429,7 +7448,7 @@ mod tests { #[test] fn repl_with_flags() { #[rustfmt::skip] - let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--no-check", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]); + let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]); assert_eq!( r.unwrap(), Flags { @@ -7477,7 +7496,6 @@ mod tests { allow_write: Some(vec![]), ..Default::default() }, - type_check_mode: TypeCheckMode::None, ..Flags::default() } ); @@ -7499,7 +7517,6 @@ mod tests { eval: None, is_default_command: false, }), - type_check_mode: TypeCheckMode::None, ..Flags::default() } ); From c7153838ec332d474c32b464b94f99382028bbbc Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 15 Oct 2024 14:47:12 +0530 Subject: [PATCH 26/52] fix(ext/node): implement TCP.setNoDelay (#26263) Fixes https://github.com/denoland/deno/issues/26177 The significant delay was caused by Nagle's algorithm + delayed ACKs in Linux kernels. 
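As a rough sketch (hypothetical host/port, not part of this patch), Node-style code like the following now actually disables Nagle's algorithm on the underlying Deno TCP stream instead of hitting the previous no-op stub:

```js
import net from "node:net";

// With TCP.setNoDelay forwarded to the underlying Deno connection, this
// opts the socket out of Nagle's algorithm and avoids the ~40ms
// write -> delayed-ACK stall seen in the pg benchmark below.
const socket = net.connect({ host: "127.0.0.1", port: 5432 }, () => {
  socket.setNoDelay(true);
  socket.end();
});
```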
Here's the [kernel patch](https://lwn.net/Articles/502585/) which added 40ms `tcp_default_delack_min` ``` $ deno run -A pg-bench.mjs # main Tue Oct 15 2024 12:27:22 GMT+0530 (India Standard Time): 42ms $ target/release/deno run -A pg-bench.mjs # this patch Tue Oct 15 2024 12:28:02 GMT+0530 (India Standard Time): 1ms ``` ```js import { Buffer } from "node:buffer"; import pg from 'pg' const { Client } = pg const client = new Client({ connectionString: 'postgresql://postgres:postgres@127.0.0.1:5432/postgres' }) await client.connect() async function fetch() { const startPerf = performance.now(); const res = await client.query(`select $1::int as int, $2 as string, $3::timestamp with time zone as timestamp, $4 as null, $5::bool as boolean, $6::bytea as bytea, $7::jsonb as json `, [ 1337, 'wat', new Date().toISOString(), null, false, Buffer.from('awesome'), JSON.stringify([{ some: 'json' }, { array: 'object' }]) ]) console.log(`${new Date()}: ${Math.round(performance.now() - startPerf)}ms`) } for(;;) await fetch(); ``` --- .../polyfills/internal_binding/tcp_wrap.ts | 4 +- tests/node_compat/config.jsonc | 1 + tests/node_compat/runner/TODO.md | 1 - .../parallel/test-net-socket-setnodelay.js | 63 +++++++++++++++++++ 4 files changed, 66 insertions(+), 3 deletions(-) create mode 100644 tests/node_compat/test/parallel/test-net-socket-setnodelay.js diff --git a/ext/node/polyfills/internal_binding/tcp_wrap.ts b/ext/node/polyfills/internal_binding/tcp_wrap.ts index 973a1d1c01..1321cc6277 100644 --- a/ext/node/polyfills/internal_binding/tcp_wrap.ts +++ b/ext/node/polyfills/internal_binding/tcp_wrap.ts @@ -299,8 +299,8 @@ export class TCP extends ConnectionWrap { * @param noDelay * @return An error status code. */ - setNoDelay(_noDelay: boolean): number { - // TODO(bnoordhuis) https://github.com/denoland/deno/pull/13103 + setNoDelay(noDelay: boolean): number { + this[kStreamBaseField].setNoDelay(noDelay); return 0; } diff --git a/tests/node_compat/config.jsonc b/tests/node_compat/config.jsonc index bc9bf476be..75f463342e 100644 --- a/tests/node_compat/config.jsonc +++ b/tests/node_compat/config.jsonc @@ -439,6 +439,7 @@ "test-net-server-unref.js", "test-net-socket-destroy-twice.js", "test-net-socket-no-halfopen-enforcer.js", + "test-net-socket-setnodelay.js", "test-net-timeout-no-handle.js", "test-net-write-arguments.js", "test-next-tick-doesnt-hang.js", diff --git a/tests/node_compat/runner/TODO.md b/tests/node_compat/runner/TODO.md index 99258f5a56..11b5d28053 100644 --- a/tests/node_compat/runner/TODO.md +++ b/tests/node_compat/runner/TODO.md @@ -1851,7 +1851,6 @@ NOTE: This file should not be manually edited. 
Please edit `tests/node_compat/co - [parallel/test-net-socket-ready-without-cb.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-ready-without-cb.js) - [parallel/test-net-socket-reset-send.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-reset-send.js) - [parallel/test-net-socket-reset-twice.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-reset-twice.js) -- [parallel/test-net-socket-setnodelay.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-setnodelay.js) - [parallel/test-net-socket-timeout-unref.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-timeout-unref.js) - [parallel/test-net-socket-timeout.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-timeout.js) - [parallel/test-net-socket-write-after-close.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-socket-write-after-close.js) diff --git a/tests/node_compat/test/parallel/test-net-socket-setnodelay.js b/tests/node_compat/test/parallel/test-net-socket-setnodelay.js new file mode 100644 index 0000000000..3d11b84528 --- /dev/null +++ b/tests/node_compat/test/parallel/test-net-socket-setnodelay.js @@ -0,0 +1,63 @@ +// deno-fmt-ignore-file +// deno-lint-ignore-file + +// Copyright Joyent and Node contributors. All rights reserved. MIT license. +// Taken from Node 18.12.1 +// This file is automatically generated by `tests/node_compat/runner/setup.ts`. Do not modify this file manually. + +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const truthyValues = [true, 1, 'true', {}, []]; +const falseyValues = [false, 0, '']; +const genSetNoDelay = (desiredArg) => (enable) => { + assert.strictEqual(enable, desiredArg); +}; + +// setNoDelay should default to true +let socket = new net.Socket({ + handle: { + setNoDelay: common.mustCall(genSetNoDelay(true)), + readStart() {} + } +}); +socket.setNoDelay(); + +socket = new net.Socket({ + handle: { + setNoDelay: common.mustCall(genSetNoDelay(true), 1), + readStart() {} + } +}); +truthyValues.forEach((testVal) => socket.setNoDelay(testVal)); + +socket = new net.Socket({ + handle: { + setNoDelay: common.mustNotCall(), + readStart() {} + } +}); +falseyValues.forEach((testVal) => socket.setNoDelay(testVal)); + +socket = new net.Socket({ + handle: { + setNoDelay: common.mustCall(3), + readStart() {} + } +}); +truthyValues.concat(falseyValues).concat(truthyValues) + .forEach((testVal) => socket.setNoDelay(testVal)); + +// If a handler doesn't have a setNoDelay function it shouldn't be called. +// In the case below, if it is called an exception will be thrown +socket = new net.Socket({ + handle: { + setNoDelay: null, + readStart() {} + } +}); +const returned = socket.setNoDelay(true); +assert.ok(returned instanceof net.Socket); From e4b52f5a7684cfb6754f2531e51f833a64d97d7b Mon Sep 17 00:00:00 2001 From: Divy Srivastava Date: Tue, 15 Oct 2024 18:26:49 +0530 Subject: [PATCH 27/52] fix: panic in `prepare_stack_trace_callback` when global interceptor throws (#26241) Fixes https://github.com/denoland/deno/issues/26240 Fixes https://github.com/denoland/deno/pull/24985#issuecomment-2365460210 Fix panic when a global interceptor is misconfigured or throws an exception. 
Updates deno_core to 0.313.0 --- Cargo.lock | 12 ++++++------ Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 56121705b2..da415dcf72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1422,9 +1422,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.312.0" +version = "0.313.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed5b20008c84715322782af2f94ff7ebc34eb50294ae098daf36b63a5b9bdd24" +checksum = "29f36be738d78e39b6603a6b07f1cf91e28baf3681f87205f07482999e0d0bc2" dependencies = [ "anyhow", "bincode", @@ -1922,9 +1922,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.188.0" +version = "0.189.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1c7a8d9169e23de729000d6903dd0dee39e521e226d1ffbb9c5336665afae6" +checksum = "e8f998ad1d5b36064109367ffe67b1088385eb3d8025efc95e445bc013a147a2" dependencies = [ "proc-macro-rules", "proc-macro2", @@ -6175,9 +6175,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.221.0" +version = "0.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ca142cf34468e22063560302e6cc5e45f48e95a0dd25b3e22d6c32c32251135" +checksum = "27130b5cd87f6f06228940a1f3a7ecc988ea13d1bede1398a48d74cb59dabc9a" dependencies = [ "num-bigint", "serde", diff --git a/Cargo.toml b/Cargo.toml index a4ec99953b..502b7b7d76 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.42.2", features = ["transpiling"] } -deno_core = { version = "0.312.0" } +deno_core = { version = "0.313.0" } deno_bench_util = { version = "0.165.0", path = "./bench_util" } deno_lockfile = "=0.23.1" From 533a9b108677f1560fe55882771a0be2bb0b0fd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9E=97=E7=82=B3=E6=9D=83?= <695601626@qq.com> Date: Wed, 16 Oct 2024 00:10:07 +0800 Subject: [PATCH 28/52] chore: upgrade to rust 1.81.0 (#26261) --- .github/workflows/ci.generate.ts | 2 +- .github/workflows/ci.yml | 8 ++++---- cli/lsp/tsc.rs | 2 +- cli/npm/common.rs | 2 +- cli/util/fs.rs | 10 +++++----- runtime/permissions/prompter.rs | 2 +- rust-toolchain.toml | 2 +- tests/integration/compile_tests.rs | 6 +++--- 8 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index 9abedad9c0..a487f11f93 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. 
-const cacheVersion = 18; +const cacheVersion = 19; const ubuntuX86Runner = "ubuntu-22.04"; const ubuntuX86XlRunner = "ubuntu-22.04-xl"; diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d8a0682cb0..57bbf6a5e5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -361,8 +361,8 @@ jobs: path: |- ~/.cargo/registry/index ~/.cargo/registry/cache - key: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' - restore-keys: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' + key: '19-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' + restore-keys: '19-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' if: '!(matrix.skip)' - name: Restore cache build output (PR) uses: actions/cache/restore@v4 @@ -375,7 +375,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '19-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache @@ -685,7 +685,7 @@ jobs: !./target/*/*.zip !./target/*/*.sha256sum !./target/*/*.tar.gz - key: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '19-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' publish-canary: name: publish canary runs-on: ubuntu-22.04 diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index c8b5c47f83..cfab39b20b 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -3939,7 +3939,7 @@ pub struct OutliningSpan { kind: OutliningSpanKind, } -const FOLD_END_PAIR_CHARACTERS: &[u8] = &[b'}', b']', b')', b'`']; +const FOLD_END_PAIR_CHARACTERS: &[u8] = b"}])`"; impl OutliningSpan { pub fn to_folding_range( diff --git a/cli/npm/common.rs b/cli/npm/common.rs index a3a828e745..de282310a1 100644 --- a/cli/npm/common.rs +++ b/cli/npm/common.rs @@ -40,7 +40,7 @@ pub fn maybe_auth_header_for_npm_registry( header::AUTHORIZATION, header::HeaderValue::from_str(&format!( "Basic {}", - BASE64_STANDARD.encode(&format!( + BASE64_STANDARD.encode(format!( "{}:{}", username.unwrap(), password.unwrap() diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 2ad3affc8d..2c34f486ad 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -160,11 +160,11 @@ fn atomic_write_file( data: &[u8], ) -> std::io::Result<()> { fs.write_file(temp_file_path, data)?; - fs.rename_file(temp_file_path, file_path).map_err(|err| { - // clean up the created temp file on error - let _ = fs.remove_file(temp_file_path); - err - }) + fs.rename_file(temp_file_path, file_path) + .inspect_err(|_err| { + // clean up the created temp file on error + let _ = fs.remove_file(temp_file_path); + }) } let temp_file_path = get_atomic_file_path(file_path); diff --git a/runtime/permissions/prompter.rs b/runtime/permissions/prompter.rs index e48e0af10d..3d7536928e 100644 --- a/runtime/permissions/prompter.rs +++ b/runtime/permissions/prompter.rs @@ -366,7 +366,7 @@ impl PermissionPrompter for TtyPrompter { let mut input = String::new(); let result = stdin_lock.read_line(&mut input); - let input = input.trim_end_matches(|c| c == '\r' || c == '\n'); + let input = input.trim_end_matches(['\r', '\n']); if result.is_err() || input.len() != 1 { break PromptResponse::Deny; }; diff --git a/rust-toolchain.toml 
b/rust-toolchain.toml index e8d3cddd18..f19c7df470 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.80.1" +channel = "1.81.0" components = ["rustfmt", "clippy"] diff --git a/tests/integration/compile_tests.rs b/tests/integration/compile_tests.rs index 0a5916f6f7..a8ea5b038b 100644 --- a/tests/integration/compile_tests.rs +++ b/tests/integration/compile_tests.rs @@ -241,7 +241,7 @@ fn compile_with_file_exists_error() { "./compile/args.ts", ]) .run() - .assert_matches_text(&format!( + .assert_matches_text(format!( concat!( "[WILDCARD]error: Could not compile to file '{}' because its parent directory ", "is an existing file. You can use the `--output ` flag to ", @@ -269,7 +269,7 @@ fn compile_with_directory_exists_error() { &exe.to_string_lossy(), "./compile/args.ts" ]).run() - .assert_matches_text(&format!( + .assert_matches_text(format!( concat!( "[WILDCARD]error: Could not compile to file '{}' because a directory exists with ", "the same name. You can use the `--output ` flag to ", @@ -297,7 +297,7 @@ fn compile_with_conflict_file_exists_error() { &exe.to_string_lossy(), "./compile/args.ts" ]).run() - .assert_matches_text(&format!( + .assert_matches_text(format!( concat!( "[WILDCARD]error: Could not compile to file '{}' because the file already exists ", "and cannot be overwritten. Please delete the existing file or ", From 797405fc61b2d155941506fb53d498076e121017 Mon Sep 17 00:00:00 2001 From: Nathan Whitaker <17734409+nathanwhit@users.noreply.github.com> Date: Tue, 15 Oct 2024 09:38:42 -0700 Subject: [PATCH 29/52] fix(add): create deno.json when running `deno add jsr:` (#26275) Fixes https://github.com/denoland/deno/issues/26119. Originally I wanted to put them in package.json if there's no deno.json, but on second thought it makes more sense to just create a deno.json --- cli/tools/registry/pm.rs | 13 +++++++++---- .../add/jsr_prefers_deno_json/__test__.jsonc | 16 ++++++++++++++++ tests/specs/add/jsr_prefers_deno_json/add.out | 3 +++ .../add/jsr_prefers_deno_json/deno.json.out | 5 +++++ .../specs/add/jsr_prefers_deno_json/package.json | 1 + 5 files changed, 34 insertions(+), 4 deletions(-) create mode 100644 tests/specs/add/jsr_prefers_deno_json/__test__.jsonc create mode 100644 tests/specs/add/jsr_prefers_deno_json/add.out create mode 100644 tests/specs/add/jsr_prefers_deno_json/deno.json.out create mode 100644 tests/specs/add/jsr_prefers_deno_json/package.json diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 3276acfbf7..ff900d113d 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -400,14 +400,17 @@ impl std::fmt::Display for AddCommandName { fn load_configs( flags: &Arc, + has_jsr_specifiers: impl FnOnce() -> bool, ) -> Result<(CliFactory, Option, Option), AnyError> { let cli_factory = CliFactory::from_flags(flags.clone()); let options = cli_factory.cli_options()?; let npm_config = NpmConfig::from_options(options)?; let (cli_factory, deno_config) = match DenoConfig::from_options(options)? 
{ Some(config) => (cli_factory, Some(config)), - None if npm_config.is_some() => (cli_factory, None), - None => { + None if npm_config.is_some() && !has_jsr_specifiers() => { + (cli_factory, None) + } + _ => { let factory = create_deno_json(flags, options)?; let options = factory.cli_options()?.clone(); ( @@ -427,7 +430,9 @@ pub async fn add( add_flags: AddFlags, cmd_name: AddCommandName, ) -> Result<(), AnyError> { - let (cli_factory, npm_config, deno_config) = load_configs(&flags)?; + let (cli_factory, npm_config, deno_config) = load_configs(&flags, || { + add_flags.packages.iter().any(|s| s.starts_with("jsr:")) + })?; let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?; let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?; @@ -764,7 +769,7 @@ pub async fn remove( flags: Arc, remove_flags: RemoveFlags, ) -> Result<(), AnyError> { - let (_, npm_config, deno_config) = load_configs(&flags)?; + let (_, npm_config, deno_config) = load_configs(&flags, || false)?; let mut configs = [ ConfigUpdater::maybe_new(npm_config).await?, diff --git a/tests/specs/add/jsr_prefers_deno_json/__test__.jsonc b/tests/specs/add/jsr_prefers_deno_json/__test__.jsonc new file mode 100644 index 0000000000..ca9c4b4d9d --- /dev/null +++ b/tests/specs/add/jsr_prefers_deno_json/__test__.jsonc @@ -0,0 +1,16 @@ +{ + "tempDir": true, + "steps": [ + { + "args": "add jsr:@denotest/add", + "output": "add.out" + }, + { + "args": [ + "eval", + "console.log(Deno.readTextFileSync('./deno.json').trim())" + ], + "output": "deno.json.out" + } + ] +} diff --git a/tests/specs/add/jsr_prefers_deno_json/add.out b/tests/specs/add/jsr_prefers_deno_json/add.out new file mode 100644 index 0000000000..cb8140c6db --- /dev/null +++ b/tests/specs/add/jsr_prefers_deno_json/add.out @@ -0,0 +1,3 @@ +Created deno.json configuration file. 
+Add jsr:@denotest/add@1.0.0 +Download [WILDCARD] diff --git a/tests/specs/add/jsr_prefers_deno_json/deno.json.out b/tests/specs/add/jsr_prefers_deno_json/deno.json.out new file mode 100644 index 0000000000..38ca2d4b85 --- /dev/null +++ b/tests/specs/add/jsr_prefers_deno_json/deno.json.out @@ -0,0 +1,5 @@ +{ + "imports": { + "@denotest/add": "jsr:@denotest/add@^1.0.0" + } +} diff --git a/tests/specs/add/jsr_prefers_deno_json/package.json b/tests/specs/add/jsr_prefers_deno_json/package.json new file mode 100644 index 0000000000..0967ef424b --- /dev/null +++ b/tests/specs/add/jsr_prefers_deno_json/package.json @@ -0,0 +1 @@ +{} From 9d93e333a6f6fe508cdf6c2fb8c91375048ec2a9 Mon Sep 17 00:00:00 2001 From: Eli Uriegas <1700823+seemethere@users.noreply.github.com> Date: Tue, 15 Oct 2024 09:47:02 -0700 Subject: [PATCH 30/52] chore: ensure only one cargo publish can run (#26262) Signed-off-by: Eli Uriegas --- .github/workflows/cargo_publish.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/cargo_publish.yml b/.github/workflows/cargo_publish.yml index f77286c7d4..8d164ea1bf 100644 --- a/.github/workflows/cargo_publish.yml +++ b/.github/workflows/cargo_publish.yml @@ -2,6 +2,11 @@ name: cargo_publish on: workflow_dispatch +# Ensures only one publish is running at a time +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true + jobs: build: name: cargo publish From 38888061698f230f2288cf520daf86d781c395bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 15 Oct 2024 17:59:28 +0100 Subject: [PATCH 31/52] refactor: always apply hint when formatting JsError (#26252) Moves code for generating suggestions and hints to `cli/fmt_errors.rs`. This effectively applies suggestions to any place that formats JS errors in the terminal, like `deno test`. Addresses https://github.com/denoland/deno/pull/26218#issuecomment-2409139055 --- cli/main.rs | 97 +------------------------------------- runtime/fmt_errors.rs | 107 ++++++++++++++++++++++++++++++++++++------ 2 files changed, 94 insertions(+), 110 deletions(-) diff --git a/cli/main.rs b/cli/main.rs index ddb6078af4..360307d755 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -47,8 +47,7 @@ use deno_core::error::JsError; use deno_core::futures::FutureExt; use deno_core::unsync::JoinHandle; use deno_npm::resolution::SnapshotFromLockfileError; -use deno_runtime::fmt_errors::format_js_error_with_suggestions; -use deno_runtime::fmt_errors::FixSuggestion; +use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_terminal::colors; use factory::CliFactory; @@ -362,104 +361,12 @@ fn exit_with_message(message: &str, code: i32) -> ! 
{ std::process::exit(code); } -fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec { - if let Some(msg) = &e.message { - if msg.contains("module is not defined") - || msg.contains("exports is not defined") - { - return vec![ - FixSuggestion::info( - "Deno does not support CommonJS modules without `.cjs` extension.", - ), - FixSuggestion::hint( - "Rewrite this module to ESM or change the file extension to `.cjs`.", - ), - ]; - } else if msg.contains("openKv is not a function") { - return vec![ - FixSuggestion::info("Deno.openKv() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-kv` flag to enable this API.", - ), - ]; - } else if msg.contains("cron is not a function") { - return vec![ - FixSuggestion::info("Deno.cron() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-cron` flag to enable this API.", - ), - ]; - } else if msg.contains("WebSocketStream is not defined") { - return vec![ - FixSuggestion::info("new WebSocketStream() is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-net` flag to enable this API.", - ), - ]; - } else if msg.contains("Temporal is not defined") { - return vec![ - FixSuggestion::info("Temporal is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-temporal` flag to enable this API.", - ), - ]; - } else if msg.contains("BroadcastChannel is not defined") { - return vec![ - FixSuggestion::info("BroadcastChannel is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-broadcast-channel` flag to enable this API.", - ), - ]; - } else if msg.contains("window is not defined") { - return vec![ - FixSuggestion::info("window global is not available in Deno 2."), - FixSuggestion::hint("Replace `window` with `globalThis`."), - ]; - } else if msg.contains("UnsafeWindowSurface is not a constructor") { - return vec![ - FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."), - FixSuggestion::hint( - "Run again with `--unstable-webgpu` flag to enable this API.", - ), - ]; - // Try to capture errors like: - // ``` - // Uncaught Error: Cannot find module '../build/Release/canvas.node' - // Require stack: - // - /.../deno/npm/registry.npmjs.org/canvas/2.11.2/lib/bindings.js - // - /.../.cache/deno/npm/registry.npmjs.org/canvas/2.11.2/lib/canvas.js - // ``` - } else if msg.contains("Cannot find module") - && msg.contains("Require stack") - && msg.contains(".node'") - { - return vec![ - FixSuggestion::info_multiline( - &[ - "Trying to execute an npm package using Node-API addons,", - "these packages require local `node_modules` directory to be present." - ] - ), - FixSuggestion::hint_multiline( - &[ - "Add `\"nodeModulesDir\": \"auto\" option to `deno.json`, and then run", - "`deno install --allow-scripts=npm: --entrypoint