mirror of https://github.com/denoland/deno.git
synced 2024-11-21 15:04:11 -05:00
refactor(ext/web): use concrete error types (#26185)
parent 40b4b9aaa3
commit eca83fc9b4
13 changed files with 257 additions and 184 deletions
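The change follows one pattern across all of these files: ops that previously returned `deno_core::error::AnyError` built with helpers such as `type_error(...)` or `range_error(...)` now return per-module `thiserror` enums, and the runtime maps each variant to the JavaScript error class it should surface as. A minimal, hedged sketch of that pattern (the names below are illustrative, not items from the actual crates):

```rust
use thiserror::Error;

// Simplified stand-in for one of the concrete error enums added in this commit.
#[derive(Debug, Error)]
pub enum ExampleError {
  #[error("Blob part not found")]
  BlobPartNotFound,
  #[error(transparent)]
  Io(#[from] std::io::Error),
}

// The runtime side then matches variants to JS error classes.
fn error_class(e: &ExampleError) -> &'static str {
  match e {
    ExampleError::BlobPartNotFound => "TypeError",
    ExampleError::Io(_) => "Error",
  }
}

fn main() {
  let err = ExampleError::BlobPartNotFound;
  // `#[error("...")]` generates the Display impl, replacing the old inline strings.
  assert_eq!(err.to_string(), "Blob part not found");
  assert_eq!(error_class(&err), "TypeError");
}
```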
1  Cargo.lock (generated)
@@ -2180,6 +2180,7 @@ dependencies = [
  "flate2",
  "futures",
  "serde",
+ "thiserror",
  "tokio",
  "uuid",
 ]

@@ -333,7 +333,7 @@ impl FileFetcher {
       )
     })?;
 
-    let bytes = blob.read_all().await?;
+    let bytes = blob.read_all().await;
     let headers =
       HashMap::from([("content-type".to_string(), blob.media_type.clone())]);
 

@@ -149,10 +149,7 @@ pub fn op_crypto_get_random_values(
   #[buffer] out: &mut [u8],
 ) -> Result<(), AnyError> {
   if out.len() > 65536 {
-    return Err(
-      deno_web::DomExceptionQuotaExceededError::new(&format!("The ArrayBufferView's byte length ({}) exceeds the number of bytes of entropy available via this API (65536)", out.len()))
-        .into(),
-    );
+    return Err(custom_error("DOMExceptionQuotaExceededError", format!("The ArrayBufferView's byte length ({}) exceeds the number of bytes of entropy available via this API (65536)", out.len())));
   }
 
   let maybe_seeded_rng = state.try_borrow_mut::<StdRng>();

@@ -23,6 +23,7 @@ encoding_rs.workspace = true
 flate2 = { workspace = true, features = ["default"] }
 futures.workspace = true
 serde = "1.0.149"
+thiserror.workspace = true
 tokio.workspace = true
 uuid = { workspace = true, features = ["serde"] }
 

@@ -7,8 +7,6 @@ use std::rc::Rc;
 use std::sync::Arc;
 
 use async_trait::async_trait;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::parking_lot::Mutex;
 use deno_core::url::Url;
@@ -19,6 +17,18 @@ use serde::Deserialize;
 use serde::Serialize;
 use uuid::Uuid;
 
+#[derive(Debug, thiserror::Error)]
+pub enum BlobError {
+  #[error("Blob part not found")]
+  BlobPartNotFound,
+  #[error("start + len can not be larger than blob part size")]
+  SizeLargerThanBlobPart,
+  #[error("Blob URLs are not supported in this context")]
+  BlobURLsNotSupported,
+  #[error(transparent)]
+  Url(#[from] deno_core::url::ParseError),
+}
+
 use crate::Location;
 
 pub type PartMap = HashMap<Uuid, Arc<dyn BlobPart + Send + Sync>>;
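A note on the enum just added: `thiserror`'s `#[error(...)]` attribute supplies the `Display` text, so `ok_or(BlobError::BlobPartNotFound)` in the hunks that follow carries the same message the old `type_error("Blob part not found")` produced. A small self-contained sketch of that lookup pattern (the `PartStore` alias and `get_part` helper here are hypothetical):

```rust
use std::collections::HashMap;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum BlobError {
  #[error("Blob part not found")]
  BlobPartNotFound,
}

// Hypothetical stand-in for the blob store: maps part ids to bytes.
type PartStore = HashMap<u64, Vec<u8>>;

fn get_part(store: &PartStore, id: u64) -> Result<&Vec<u8>, BlobError> {
  // `ok_or` replaces the old `ok_or_else(|| type_error("Blob part not found"))`.
  store.get(&id).ok_or(BlobError::BlobPartNotFound)
}

fn main() {
  let store = PartStore::new();
  let err = get_part(&store, 1).unwrap_err();
  assert_eq!(err.to_string(), "Blob part not found");
}
```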
@@ -96,18 +106,18 @@ pub struct Blob {
 
 impl Blob {
   // TODO(lucacsonato): this should be a stream!
-  pub async fn read_all(&self) -> Result<Vec<u8>, AnyError> {
+  pub async fn read_all(&self) -> Vec<u8> {
     let size = self.size();
     let mut bytes = Vec::with_capacity(size);
 
     for part in &self.parts {
-      let chunk = part.read().await?;
+      let chunk = part.read().await;
       bytes.extend_from_slice(chunk);
     }
 
     assert_eq!(bytes.len(), size);
 
-    Ok(bytes)
+    bytes
   }
 
   fn size(&self) -> usize {

@@ -122,7 +132,7 @@ impl Blob {
 #[async_trait]
 pub trait BlobPart: Debug {
   // TODO(lucacsonato): this should be a stream!
-  async fn read(&self) -> Result<&[u8], AnyError>;
+  async fn read(&self) -> &[u8];
   fn size(&self) -> usize;
 }
 

@@ -137,8 +147,8 @@ impl From<Vec<u8>> for InMemoryBlobPart {
 
 #[async_trait]
 impl BlobPart for InMemoryBlobPart {
-  async fn read(&self) -> Result<&[u8], AnyError> {
-    Ok(&self.0)
+  async fn read(&self) -> &[u8] {
+    &self.0
   }
 
   fn size(&self) -> usize {

@@ -155,9 +165,9 @@ pub struct SlicedBlobPart {
 
 #[async_trait]
 impl BlobPart for SlicedBlobPart {
-  async fn read(&self) -> Result<&[u8], AnyError> {
-    let original = self.part.read().await?;
-    Ok(&original[self.start..self.start + self.len])
+  async fn read(&self) -> &[u8] {
+    let original = self.part.read().await;
+    &original[self.start..self.start + self.len]
   }
 
   fn size(&self) -> usize {

@@ -189,19 +199,17 @@ pub fn op_blob_slice_part(
   state: &mut OpState,
   #[serde] id: Uuid,
   #[serde] options: SliceOptions,
-) -> Result<Uuid, AnyError> {
+) -> Result<Uuid, BlobError> {
   let blob_store = state.borrow::<Arc<BlobStore>>();
   let part = blob_store
     .get_part(&id)
-    .ok_or_else(|| type_error("Blob part not found"))?;
+    .ok_or(BlobError::BlobPartNotFound)?;
 
   let SliceOptions { start, len } = options;
 
   let size = part.size();
   if start + len > size {
-    return Err(type_error(
-      "start + len can not be larger than blob part size",
-    ));
+    return Err(BlobError::SizeLargerThanBlobPart);
   }
 
   let sliced_part = SlicedBlobPart { part, start, len };

@@ -215,14 +223,14 @@ pub fn op_blob_slice_part(
 pub async fn op_blob_read_part(
   state: Rc<RefCell<OpState>>,
   #[serde] id: Uuid,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, BlobError> {
   let part = {
     let state = state.borrow();
     let blob_store = state.borrow::<Arc<BlobStore>>();
     blob_store.get_part(&id)
   }
-  .ok_or_else(|| type_error("Blob part not found"))?;
-  let buf = part.read().await?;
+  .ok_or(BlobError::BlobPartNotFound)?;
+  let buf = part.read().await;
   Ok(ToJsBuffer::from(buf.to_vec()))
 }
 

@@ -238,13 +246,13 @@ pub fn op_blob_create_object_url(
   state: &mut OpState,
   #[string] media_type: String,
   #[serde] part_ids: Vec<Uuid>,
-) -> Result<String, AnyError> {
+) -> Result<String, BlobError> {
   let mut parts = Vec::with_capacity(part_ids.len());
   let blob_store = state.borrow::<Arc<BlobStore>>();
   for part_id in part_ids {
     let part = blob_store
       .get_part(&part_id)
-      .ok_or_else(|| type_error("Blob part not found"))?;
+      .ok_or(BlobError::BlobPartNotFound)?;
     parts.push(part);
   }
 

@@ -263,7 +271,7 @@ pub fn op_blob_create_object_url(
 pub fn op_blob_revoke_object_url(
   state: &mut OpState,
   #[string] url: &str,
-) -> Result<(), AnyError> {
+) -> Result<(), BlobError> {
   let url = Url::parse(url)?;
   let blob_store = state.borrow::<Arc<BlobStore>>();
   blob_store.remove_object_url(&url);

@@ -287,15 +295,15 @@ pub struct ReturnBlobPart {
 pub fn op_blob_from_object_url(
   state: &mut OpState,
   #[string] url: String,
-) -> Result<Option<ReturnBlob>, AnyError> {
+) -> Result<Option<ReturnBlob>, BlobError> {
   let url = Url::parse(&url)?;
   if url.scheme() != "blob" {
     return Ok(None);
   }
 
-  let blob_store = state.try_borrow::<Arc<BlobStore>>().ok_or_else(|| {
-    type_error("Blob URLs are not supported in this context.")
-  })?;
+  let blob_store = state
+    .try_borrow::<Arc<BlobStore>>()
+    .ok_or(BlobError::BlobURLsNotSupported)?;
   if let Some(blob) = blob_store.get_object_url(url) {
     let parts = blob
       .parts

@@ -1,7 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use flate2::write::DeflateDecoder;
 use flate2::write::DeflateEncoder;

@@ -13,6 +11,18 @@ use flate2::Compression;
 use std::cell::RefCell;
 use std::io::Write;
 
+#[derive(Debug, thiserror::Error)]
+pub enum CompressionError {
+  #[error("Unsupported format")]
+  UnsupportedFormat,
+  #[error("resource is closed")]
+  ResourceClosed,
+  #[error(transparent)]
+  IoTypeError(std::io::Error),
+  #[error(transparent)]
+  Io(std::io::Error),
+}
+
 #[derive(Debug)]
 struct CompressionResource(RefCell<Option<Inner>>);
 
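`CompressionError` keeps two `std::io::Error` wrappers on purpose: `IoTypeError` surfaces to JS as a `TypeError` (matching the old `type_error(e.to_string())` calls around `write_all` and `finish`), while `Io` keeps the ordinary io error class for `flush` failures. Because tuple-variant constructors are plain functions, they slot directly into `map_err`, as the hunks below show. A hedged, standalone sketch of that idiom:

```rust
use std::io::Write;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum CompressionError {
  #[error(transparent)]
  IoTypeError(std::io::Error),
  #[error(transparent)]
  Io(std::io::Error),
}

// Variant constructors used directly as `map_err` functions, mirroring the diff.
fn write_and_flush(
  w: &mut impl Write,
  input: &[u8],
) -> Result<(), CompressionError> {
  w.write_all(input).map_err(CompressionError::IoTypeError)?;
  w.flush().map_err(CompressionError::Io)?;
  Ok(())
}

fn main() {
  let mut buf = Vec::new();
  write_and_flush(&mut buf, b"deflate me").unwrap();
  assert_eq!(buf, b"deflate me".to_vec());
}
```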
@@ -34,7 +44,7 @@ enum Inner {
 pub fn op_compression_new(
   #[string] format: &str,
   is_decoder: bool,
-) -> Result<CompressionResource, AnyError> {
+) -> Result<CompressionResource, CompressionError> {
   let w = Vec::new();
   let inner = match (format, is_decoder) {
     ("deflate", true) => Inner::DeflateDecoder(ZlibDecoder::new(w)),

@@ -49,7 +59,7 @@ pub fn op_compression_new(
     ("gzip", false) => {
       Inner::GzEncoder(GzEncoder::new(w, Compression::default()))
     }
-    _ => return Err(type_error("Unsupported format")),
+    _ => return Err(CompressionError::UnsupportedFormat),
   };
   Ok(CompressionResource(RefCell::new(Some(inner))))
 }

@@ -59,40 +69,38 @@ pub fn op_compression_new(
 pub fn op_compression_write(
   #[cppgc] resource: &CompressionResource,
   #[anybuffer] input: &[u8],
-) -> Result<Vec<u8>, AnyError> {
+) -> Result<Vec<u8>, CompressionError> {
   let mut inner = resource.0.borrow_mut();
-  let inner = inner
-    .as_mut()
-    .ok_or_else(|| type_error("resource is closed"))?;
+  let inner = inner.as_mut().ok_or(CompressionError::ResourceClosed)?;
   let out: Vec<u8> = match &mut *inner {
     Inner::DeflateDecoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
       d.get_mut().drain(..)
     }
     Inner::DeflateEncoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
       d.get_mut().drain(..)
     }
     Inner::DeflateRawDecoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
       d.get_mut().drain(..)
     }
     Inner::DeflateRawEncoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
      d.get_mut().drain(..)
     }
     Inner::GzDecoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
       d.get_mut().drain(..)
     }
     Inner::GzEncoder(d) => {
-      d.write_all(input).map_err(|e| type_error(e.to_string()))?;
-      d.flush()?;
+      d.write_all(input).map_err(CompressionError::IoTypeError)?;
+      d.flush().map_err(CompressionError::Io)?;
       d.get_mut().drain(..)
     }
   }

@@ -105,27 +113,27 @@ pub fn op_compression_write(
 pub fn op_compression_finish(
   #[cppgc] resource: &CompressionResource,
   report_errors: bool,
-) -> Result<Vec<u8>, AnyError> {
+) -> Result<Vec<u8>, CompressionError> {
   let inner = resource
     .0
     .borrow_mut()
     .take()
-    .ok_or_else(|| type_error("resource is closed"))?;
+    .ok_or(CompressionError::ResourceClosed)?;
   let out = match inner {
     Inner::DeflateDecoder(d) => {
-      d.finish().map_err(|e| type_error(e.to_string()))
+      d.finish().map_err(CompressionError::IoTypeError)
     }
     Inner::DeflateEncoder(d) => {
-      d.finish().map_err(|e| type_error(e.to_string()))
+      d.finish().map_err(CompressionError::IoTypeError)
     }
     Inner::DeflateRawDecoder(d) => {
-      d.finish().map_err(|e| type_error(e.to_string()))
+      d.finish().map_err(CompressionError::IoTypeError)
     }
     Inner::DeflateRawEncoder(d) => {
-      d.finish().map_err(|e| type_error(e.to_string()))
+      d.finish().map_err(CompressionError::IoTypeError)
     }
-    Inner::GzDecoder(d) => d.finish().map_err(|e| type_error(e.to_string())),
-    Inner::GzEncoder(d) => d.finish().map_err(|e| type_error(e.to_string())),
+    Inner::GzDecoder(d) => d.finish().map_err(CompressionError::IoTypeError),
+    Inner::GzEncoder(d) => d.finish().map_err(CompressionError::IoTypeError),
   };
   match out {
     Err(err) => {

146  ext/web/lib.rs
@@ -6,9 +6,6 @@ mod message_port;
 mod stream_resource;
 mod timers;
 
-use deno_core::error::range_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::url::Url;
 use deno_core::v8;

@@ -22,10 +19,14 @@ use encoding_rs::DecoderResult;
 use encoding_rs::Encoding;
 use std::borrow::Cow;
 use std::cell::RefCell;
-use std::fmt;
 use std::path::PathBuf;
 use std::sync::Arc;
 
+pub use blob::BlobError;
+pub use compression::CompressionError;
+pub use message_port::MessagePortError;
+pub use stream_resource::StreamResourceError;
+
 use crate::blob::op_blob_create_object_url;
 use crate::blob::op_blob_create_part;
 use crate::blob::op_blob_from_object_url;

@@ -126,9 +127,27 @@ deno_core::extension!(deno_web,
   }
 );
 
+#[derive(Debug, thiserror::Error)]
+pub enum WebError {
+  #[error("Failed to decode base64")]
+  Base64Decode,
+  #[error("The encoding label provided ('{0}') is invalid.")]
+  InvalidEncodingLabel(String),
+  #[error("buffer exceeds maximum length")]
+  BufferTooLong,
+  #[error("Value too large to decode")]
+  ValueTooLarge,
+  #[error("Provided buffer too small")]
+  BufferTooSmall,
+  #[error("The encoded data is not valid")]
+  DataInvalid,
+  #[error(transparent)]
+  DataError(#[from] v8::DataError),
+}
+
 #[op2]
 #[serde]
-fn op_base64_decode(#[string] input: String) -> Result<ToJsBuffer, AnyError> {
+fn op_base64_decode(#[string] input: String) -> Result<ToJsBuffer, WebError> {
   let mut s = input.into_bytes();
   let decoded_len = forgiving_base64_decode_inplace(&mut s)?;
   s.truncate(decoded_len);
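One detail worth noting in `WebError`: the `DataError(#[from] v8::DataError)` variant means `?` can convert a failed `v8::Local::<v8::String>::try_from(input)` directly into a `WebError` (used later in `op_encoding_encode_into`). A minimal sketch of how `#[from]` enables that, with a stand-in type instead of the real `v8::DataError` so it compiles on its own:

```rust
use thiserror::Error;

// Stand-in for v8::DataError; purely illustrative.
#[derive(Debug, Error)]
#[error("value is not of the expected type")]
pub struct DataError;

#[derive(Debug, Error)]
pub enum WebError {
  #[error("buffer exceeds maximum length")]
  BufferTooLong,
  #[error(transparent)]
  DataError(#[from] DataError),
}

// `#[from]` generates `impl From<DataError> for WebError`, so the inner error
// converts automatically at `?` sites or via `.into()`.
fn cast(ok: bool) -> Result<u32, WebError> {
  if !ok {
    return Err(DataError.into());
  }
  Ok(42)
}

fn main() {
  assert!(matches!(cast(false), Err(WebError::DataError(_))));
  assert_eq!(cast(true).unwrap(), 42);
}
```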
@@ -137,7 +156,7 @@ fn op_base64_decode(#[string] input: String) -> Result<ToJsBuffer, AnyError> {
 
 #[op2]
 #[serde]
-fn op_base64_atob(#[serde] mut s: ByteString) -> Result<ByteString, AnyError> {
+fn op_base64_atob(#[serde] mut s: ByteString) -> Result<ByteString, WebError> {
   let decoded_len = forgiving_base64_decode_inplace(&mut s)?;
   s.truncate(decoded_len);
   Ok(s)

@@ -147,11 +166,9 @@ fn op_base64_atob(#[serde] mut s: ByteString) -> Result<ByteString, AnyError> {
 #[inline]
 fn forgiving_base64_decode_inplace(
   input: &mut [u8],
-) -> Result<usize, AnyError> {
-  let error =
-    || DomExceptionInvalidCharacterError::new("Failed to decode base64");
-  let decoded =
-    base64_simd::forgiving_decode_inplace(input).map_err(|_| error())?;
+) -> Result<usize, WebError> {
+  let decoded = base64_simd::forgiving_decode_inplace(input)
+    .map_err(|_| WebError::Base64Decode)?;
   Ok(decoded.len())
 }
 

@@ -177,13 +194,9 @@ fn forgiving_base64_encode(s: &[u8]) -> String {
 #[string]
 fn op_encoding_normalize_label(
   #[string] label: String,
-) -> Result<String, AnyError> {
+) -> Result<String, WebError> {
   let encoding = Encoding::for_label_no_replacement(label.as_bytes())
-    .ok_or_else(|| {
-      range_error(format!(
-        "The encoding label provided ('{label}') is invalid."
-      ))
-    })?;
+    .ok_or(WebError::InvalidEncodingLabel(label))?;
   Ok(encoding.name().to_lowercase())
 }
 

@@ -192,7 +205,7 @@ fn op_encoding_decode_utf8<'a>(
   scope: &mut v8::HandleScope<'a>,
   #[anybuffer] zero_copy: &[u8],
   ignore_bom: bool,
-) -> Result<v8::Local<'a, v8::String>, AnyError> {
+) -> Result<v8::Local<'a, v8::String>, WebError> {
   let buf = &zero_copy;
 
   let buf = if !ignore_bom

@@ -216,7 +229,7 @@ fn op_encoding_decode_utf8<'a>(
   // - https://github.com/v8/v8/blob/d68fb4733e39525f9ff0a9222107c02c28096e2a/include/v8.h#L3277-L3278
   match v8::String::new_from_utf8(scope, buf, v8::NewStringType::Normal) {
     Some(text) => Ok(text),
-    None => Err(type_error("buffer exceeds maximum length")),
+    None => Err(WebError::BufferTooLong),
   }
 }
 

@@ -227,12 +240,9 @@ fn op_encoding_decode_single(
   #[string] label: String,
   fatal: bool,
   ignore_bom: bool,
-) -> Result<U16String, AnyError> {
-  let encoding = Encoding::for_label(label.as_bytes()).ok_or_else(|| {
-    range_error(format!(
-      "The encoding label provided ('{label}') is invalid."
-    ))
-  })?;
+) -> Result<U16String, WebError> {
+  let encoding = Encoding::for_label(label.as_bytes())
+    .ok_or(WebError::InvalidEncodingLabel(label))?;
 
   let mut decoder = if ignore_bom {
     encoding.new_decoder_without_bom_handling()

@@ -242,7 +252,7 @@ fn op_encoding_decode_single(
 
   let max_buffer_length = decoder
     .max_utf16_buffer_length(data.len())
-    .ok_or_else(|| range_error("Value too large to decode."))?;
+    .ok_or(WebError::ValueTooLarge)?;
 
   let mut output = vec![0; max_buffer_length];
 

@@ -254,12 +264,8 @@ fn op_encoding_decode_single(
         output.truncate(written);
         Ok(output.into())
       }
-      DecoderResult::OutputFull => {
-        Err(range_error("Provided buffer too small."))
-      }
-      DecoderResult::Malformed(_, _) => {
-        Err(type_error("The encoded data is not valid."))
-      }
+      DecoderResult::OutputFull => Err(WebError::BufferTooSmall),
+      DecoderResult::Malformed(_, _) => Err(WebError::DataInvalid),
     }
   } else {
     let (result, _, written, _) =

@@ -269,7 +275,7 @@ fn op_encoding_decode_single(
         output.truncate(written);
         Ok(output.into())
       }
-      CoderResult::OutputFull => Err(range_error("Provided buffer too small.")),
+      CoderResult::OutputFull => Err(WebError::BufferTooSmall),
     }
   }
 }

@@ -280,12 +286,9 @@ fn op_encoding_new_decoder(
   #[string] label: &str,
   fatal: bool,
   ignore_bom: bool,
-) -> Result<TextDecoderResource, AnyError> {
-  let encoding = Encoding::for_label(label.as_bytes()).ok_or_else(|| {
-    range_error(format!(
-      "The encoding label provided ('{label}') is invalid."
-    ))
-  })?;
+) -> Result<TextDecoderResource, WebError> {
+  let encoding = Encoding::for_label(label.as_bytes())
+    .ok_or_else(|| WebError::InvalidEncodingLabel(label.to_string()))?;
 
   let decoder = if ignore_bom {
     encoding.new_decoder_without_bom_handling()

@@ -305,13 +308,13 @@ fn op_encoding_decode(
   #[anybuffer] data: &[u8],
   #[cppgc] resource: &TextDecoderResource,
   stream: bool,
-) -> Result<U16String, AnyError> {
+) -> Result<U16String, WebError> {
   let mut decoder = resource.decoder.borrow_mut();
   let fatal = resource.fatal;
 
   let max_buffer_length = decoder
     .max_utf16_buffer_length(data.len())
-    .ok_or_else(|| range_error("Value too large to decode."))?;
+    .ok_or(WebError::ValueTooLarge)?;
 
   let mut output = vec![0; max_buffer_length];
 

@@ -323,12 +326,8 @@ fn op_encoding_decode(
         output.truncate(written);
         Ok(output.into())
       }
-      DecoderResult::OutputFull => {
-        Err(range_error("Provided buffer too small."))
-      }
-      DecoderResult::Malformed(_, _) => {
-        Err(type_error("The encoded data is not valid."))
-      }
+      DecoderResult::OutputFull => Err(WebError::BufferTooSmall),
+      DecoderResult::Malformed(_, _) => Err(WebError::DataInvalid),
     }
   } else {
     let (result, _, written, _) =

@@ -338,7 +337,7 @@ fn op_encoding_decode(
         output.truncate(written);
         Ok(output.into())
       }
-      CoderResult::OutputFull => Err(range_error("Provided buffer too small.")),
+      CoderResult::OutputFull => Err(WebError::BufferTooSmall),
     }
   }
 }

@@ -356,7 +355,7 @@ fn op_encoding_encode_into(
   input: v8::Local<v8::Value>,
   #[buffer] buffer: &mut [u8],
   #[buffer] out_buf: &mut [u32],
-) -> Result<(), AnyError> {
+) -> Result<(), WebError> {
   let s = v8::Local::<v8::String>::try_from(input)?;
 
   let mut nchars = 0;

@@ -414,53 +413,4 @@ pub fn get_declaration() -> PathBuf {
   PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_web.d.ts")
 }
 
-#[derive(Debug)]
-pub struct DomExceptionQuotaExceededError {
-  pub msg: String,
-}
-
-impl DomExceptionQuotaExceededError {
-  pub fn new(msg: &str) -> Self {
-    DomExceptionQuotaExceededError {
-      msg: msg.to_string(),
-    }
-  }
-}
-
-#[derive(Debug)]
-pub struct DomExceptionInvalidCharacterError {
-  pub msg: String,
-}
-
-impl DomExceptionInvalidCharacterError {
-  pub fn new(msg: &str) -> Self {
-    DomExceptionInvalidCharacterError {
-      msg: msg.to_string(),
-    }
-  }
-}
-
-impl fmt::Display for DomExceptionQuotaExceededError {
-  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-    f.pad(&self.msg)
-  }
-}
-impl fmt::Display for DomExceptionInvalidCharacterError {
-  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-    f.pad(&self.msg)
-  }
-}
-
-impl std::error::Error for DomExceptionQuotaExceededError {}
-
-impl std::error::Error for DomExceptionInvalidCharacterError {}
-
-pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
-  e.downcast_ref::<DomExceptionQuotaExceededError>()
-    .map(|_| "DOMExceptionQuotaExceededError")
-    .or_else(|| {
-      e.downcast_ref::<DomExceptionInvalidCharacterError>()
-        .map(|_| "DOMExceptionInvalidCharacterError")
-    })
-}
 pub struct Location(pub Url);

@@ -4,8 +4,6 @@ use std::borrow::Cow;
 use std::cell::RefCell;
 use std::rc::Rc;
 
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 
 use deno_core::CancelFuture;

@@ -23,6 +21,20 @@ use tokio::sync::mpsc::unbounded_channel;
 use tokio::sync::mpsc::UnboundedReceiver;
 use tokio::sync::mpsc::UnboundedSender;
 
+#[derive(Debug, thiserror::Error)]
+pub enum MessagePortError {
+  #[error("Invalid message port transfer")]
+  InvalidTransfer,
+  #[error("Message port is not ready for transfer")]
+  NotReady,
+  #[error("Can not transfer self message port")]
+  TransferSelf,
+  #[error(transparent)]
+  Canceled(#[from] deno_core::Canceled),
+  #[error(transparent)]
+  Resource(deno_core::error::AnyError),
+}
+
 pub enum Transferable {
   MessagePort(MessagePort),
   ArrayBuffer(u32),
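`MessagePortError` mixes fully concrete variants with a `Resource(AnyError)` escape hatch for resource-table failures, so the original error (and its class) is preserved rather than flattened to a string. A hedged sketch of that shape, with `anyhow::Error` standing in for `deno_core::error::AnyError` and a hypothetical lookup function:

```rust
use thiserror::Error;

#[derive(Debug, Error)]
pub enum MessagePortError {
  #[error("Invalid message port transfer")]
  InvalidTransfer,
  #[error(transparent)]
  Resource(anyhow::Error),
}

// Hypothetical lookup that can fail with either a concrete or a wrapped error.
fn take_port(id: u32) -> Result<u32, MessagePortError> {
  if id == 0 {
    // A dynamic resource-table error is wrapped, not stringified.
    return Err(MessagePortError::Resource(anyhow::anyhow!(
      "Bad resource ID: {id}"
    )));
  }
  Ok(id)
}

fn main() {
  assert!(matches!(take_port(0), Err(MessagePortError::Resource(_))));
  assert_eq!(take_port(7).unwrap(), 7);
}
```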
@@ -40,7 +52,7 @@ impl MessagePort {
     &self,
     state: &mut OpState,
     data: JsMessageData,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), MessagePortError> {
     let transferables =
       deserialize_js_transferables(state, data.transferables)?;
 

@@ -56,7 +68,7 @@ impl MessagePort {
   pub async fn recv(
     &self,
     state: Rc<RefCell<OpState>>,
-  ) -> Result<Option<JsMessageData>, AnyError> {
+  ) -> Result<Option<JsMessageData>, MessagePortError> {
     let rx = &self.rx;
 
     let maybe_data = poll_fn(|cx| {

@@ -147,7 +159,7 @@ pub enum JsTransferable {
 pub fn deserialize_js_transferables(
   state: &mut OpState,
   js_transferables: Vec<JsTransferable>,
-) -> Result<Vec<Transferable>, AnyError> {
+) -> Result<Vec<Transferable>, MessagePortError> {
   let mut transferables = Vec::with_capacity(js_transferables.len());
   for js_transferable in js_transferables {
     match js_transferable {

@@ -155,10 +167,10 @@ pub fn deserialize_js_transferables(
         let resource = state
           .resource_table
           .take::<MessagePortResource>(id)
-          .map_err(|_| type_error("Invalid message port transfer"))?;
+          .map_err(|_| MessagePortError::InvalidTransfer)?;
         resource.cancel.cancel();
-        let resource = Rc::try_unwrap(resource)
-          .map_err(|_| type_error("Message port is not ready for transfer"))?;
+        let resource =
+          Rc::try_unwrap(resource).map_err(|_| MessagePortError::NotReady)?;
         transferables.push(Transferable::MessagePort(resource.port));
       }
       JsTransferable::ArrayBuffer(id) => {

@@ -202,16 +214,19 @@ pub fn op_message_port_post_message(
   state: &mut OpState,
   #[smi] rid: ResourceId,
   #[serde] data: JsMessageData,
-) -> Result<(), AnyError> {
+) -> Result<(), MessagePortError> {
   for js_transferable in &data.transferables {
     if let JsTransferable::MessagePort(id) = js_transferable {
       if *id == rid {
-        return Err(type_error("Can not transfer self message port"));
+        return Err(MessagePortError::TransferSelf);
      }
     }
   }
 
-  let resource = state.resource_table.get::<MessagePortResource>(rid)?;
+  let resource = state
+    .resource_table
+    .get::<MessagePortResource>(rid)
+    .map_err(MessagePortError::Resource)?;
   resource.port.send(state, data)
 }
 

@@ -220,7 +235,7 @@ pub fn op_message_port_post_message(
 pub async fn op_message_port_recv_message(
   state: Rc<RefCell<OpState>>,
   #[smi] rid: ResourceId,
-) -> Result<Option<JsMessageData>, AnyError> {
+) -> Result<Option<JsMessageData>, MessagePortError> {
   let resource = {
     let state = state.borrow();
     match state.resource_table.get::<MessagePortResource>(rid) {

@@ -237,8 +252,11 @@ pub async fn op_message_port_recv_message(
 pub fn op_message_port_recv_message_sync(
   state: &mut OpState, // Rc<RefCell<OpState>>,
   #[smi] rid: ResourceId,
-) -> Result<Option<JsMessageData>, AnyError> {
-  let resource = state.resource_table.get::<MessagePortResource>(rid)?;
+) -> Result<Option<JsMessageData>, MessagePortError> {
+  let resource = state
+    .resource_table
+    .get::<MessagePortResource>(rid)
+    .map_err(MessagePortError::Resource)?;
   let mut rx = resource.port.rx.borrow_mut();
 
   match rx.try_recv() {

@@ -1,7 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 use bytes::BytesMut;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::external;
 use deno_core::op2;
 use deno_core::serde_v8::V8Slice;

@@ -18,6 +16,7 @@ use deno_core::RcRef;
 use deno_core::Resource;
 use deno_core::ResourceId;
 use futures::future::poll_fn;
+use futures::TryFutureExt;
 use std::borrow::Cow;
 use std::cell::RefCell;
 use std::cell::RefMut;

@@ -31,6 +30,14 @@ use std::task::Context;
 use std::task::Poll;
 use std::task::Waker;
 
+#[derive(Debug, thiserror::Error)]
+pub enum StreamResourceError {
+  #[error(transparent)]
+  Canceled(#[from] deno_core::Canceled),
+  #[error("{0}")]
+  Js(String),
+}
+
 // How many buffers we'll allow in the channel before we stop allowing writes.
 const BUFFER_CHANNEL_SIZE: u16 = 1024;
 
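`StreamResourceError` still has to cross the generic `Resource` trait boundary, which expects a dynamic error; that is why the hunks below add `.map_err(|e| e.into())` when boxing the read future. The conversion works because any `std::error::Error + Send + Sync + 'static` type converts into `anyhow::Error`, which is what `deno_core`'s `AnyError` is. A small sketch of that widening step, with `anyhow::Error` standing in for `AnyError`:

```rust
use thiserror::Error;

#[derive(Debug, Error)]
pub enum StreamResourceError {
  #[error("{0}")]
  Js(String),
}

fn read_concrete(fail: bool) -> Result<Vec<u8>, StreamResourceError> {
  if fail {
    Err(StreamResourceError::Js("boom".to_string()))
  } else {
    Ok(vec![1, 2, 3])
  }
}

// At the trait boundary the concrete error is widened back to a dynamic one.
fn read_widened(fail: bool) -> Result<Vec<u8>, anyhow::Error> {
  read_concrete(fail).map_err(|e| e.into())
}

fn main() {
  assert_eq!(read_widened(false).unwrap(), vec![1, 2, 3]);
  assert_eq!(read_widened(true).unwrap_err().to_string(), "boom");
}
```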
@@ -48,7 +55,7 @@ struct BoundedBufferChannelInner {
   buffers: [MaybeUninit<V8Slice<u8>>; BUFFER_CHANNEL_SIZE as _],
   ring_producer: u16,
   ring_consumer: u16,
-  error: Option<AnyError>,
+  error: Option<StreamResourceError>,
   current_size: usize,
   // TODO(mmastrac): we can math this field instead of accounting for it
   len: usize,

@@ -141,7 +148,10 @@ impl BoundedBufferChannelInner {
     self.len = 0;
   }
 
-  pub fn read(&mut self, limit: usize) -> Result<Option<BufView>, AnyError> {
+  pub fn read(
+    &mut self,
+    limit: usize,
+  ) -> Result<Option<BufView>, StreamResourceError> {
     // Empty buffers will return the error, if one exists, or None
     if self.len == 0 {
       if let Some(error) = self.error.take() {

@@ -230,7 +240,7 @@ impl BoundedBufferChannelInner {
     Ok(())
   }
 
-  pub fn write_error(&mut self, error: AnyError) {
+  pub fn write_error(&mut self, error: StreamResourceError) {
     self.error = Some(error);
     if let Some(waker) = self.read_waker.take() {
       waker.wake();

@@ -306,7 +316,10 @@ impl BoundedBufferChannel {
     self.inner.borrow_mut()
   }
 
-  pub fn read(&self, limit: usize) -> Result<Option<BufView>, AnyError> {
+  pub fn read(
+    &self,
+    limit: usize,
+  ) -> Result<Option<BufView>, StreamResourceError> {
     self.inner().read(limit)
   }
 

@@ -314,7 +327,7 @@ impl BoundedBufferChannel {
     self.inner().write(buffer)
   }
 
-  pub fn write_error(&self, error: AnyError) {
+  pub fn write_error(&self, error: StreamResourceError) {
     self.inner().write_error(error)
   }
 

@@ -358,7 +371,10 @@ impl ReadableStreamResource {
     RcRef::map(self, |s| &s.cancel_handle).clone()
   }
 
-  async fn read(self: Rc<Self>, limit: usize) -> Result<BufView, AnyError> {
+  async fn read(
+    self: Rc<Self>,
+    limit: usize,
+  ) -> Result<BufView, StreamResourceError> {
     let cancel_handle = self.cancel_handle();
     // Serialize all the reads using a task queue.
     let _read_permit = self.read_queue.acquire().await;

@@ -387,7 +403,7 @@ impl Resource for ReadableStreamResource {
   }
 
   fn read(self: Rc<Self>, limit: usize) -> AsyncResult<BufView> {
-    Box::pin(ReadableStreamResource::read(self, limit))
+    Box::pin(ReadableStreamResource::read(self, limit).map_err(|e| e.into()))
   }
 
   fn close(self: Rc<Self>) {

@@ -550,7 +566,7 @@ pub fn op_readable_stream_resource_write_error(
 ) -> bool {
   let sender = get_sender(sender);
   // We can always write an error, no polling required
-  sender.write_error(type_error(Cow::Owned(error)));
+  sender.write_error(StreamResourceError::Js(error));
   !sender.closed()
 }
 

@@ -28,6 +28,11 @@ use deno_kv::KvError;
 use deno_kv::KvMutationError;
 use deno_net::ops::NetError;
 use deno_tls::TlsError;
+use deno_web::BlobError;
+use deno_web::CompressionError;
+use deno_web::MessagePortError;
+use deno_web::StreamResourceError;
+use deno_web::WebError;
 use deno_webstorage::WebStorageError;
 use std::env;
 use std::error::Error;

@@ -169,6 +174,61 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str {
   }
 }
 
+fn get_web_error_class(e: &WebError) -> &'static str {
+  match e {
+    WebError::Base64Decode => "DOMExceptionInvalidCharacterError",
+    WebError::InvalidEncodingLabel(_) => "RangeError",
+    WebError::BufferTooLong => "TypeError",
+    WebError::ValueTooLarge => "RangeError",
+    WebError::BufferTooSmall => "RangeError",
+    WebError::DataInvalid => "TypeError",
+    WebError::DataError(_) => "Error",
+  }
+}
+
+fn get_web_compression_error_class(e: &CompressionError) -> &'static str {
+  match e {
+    CompressionError::UnsupportedFormat => "TypeError",
+    CompressionError::ResourceClosed => "TypeError",
+    CompressionError::IoTypeError(_) => "TypeError",
+    CompressionError::Io(e) => get_io_error_class(e),
+  }
+}
+
+fn get_web_message_port_error_class(e: &MessagePortError) -> &'static str {
+  match e {
+    MessagePortError::InvalidTransfer => "TypeError",
+    MessagePortError::NotReady => "TypeError",
+    MessagePortError::TransferSelf => "TypeError",
+    MessagePortError::Canceled(e) => {
+      let io_err: io::Error = e.to_owned().into();
+      get_io_error_class(&io_err)
+    }
+    MessagePortError::Resource(e) => get_error_class_name(e).unwrap_or("Error"),
+  }
+}
+
+fn get_web_stream_resource_error_class(
+  e: &StreamResourceError,
+) -> &'static str {
+  match e {
+    StreamResourceError::Canceled(e) => {
+      let io_err: io::Error = e.to_owned().into();
+      get_io_error_class(&io_err)
+    }
+    StreamResourceError::Js(_) => "TypeError",
+  }
+}
+
+fn get_web_blob_error_class(e: &BlobError) -> &'static str {
+  match e {
+    BlobError::BlobPartNotFound => "TypeError",
+    BlobError::SizeLargerThanBlobPart => "TypeError",
+    BlobError::BlobURLsNotSupported => "TypeError",
+    BlobError::Url(_) => "Error",
+  }
+}
+
 fn get_ffi_repr_error_class(e: &ReprError) -> &'static str {
   match e {
     ReprError::InvalidOffset => "TypeError",

@@ -382,8 +442,21 @@ fn get_net_map_error(error: &deno_net::io::MapError) -> &'static str {
 pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
   deno_core::error::get_custom_error_class(e)
     .or_else(|| deno_webgpu::error::get_error_class_name(e))
-    .or_else(|| deno_web::get_error_class_name(e))
     .or_else(|| deno_websocket::get_network_error_class_name(e))
+    .or_else(|| e.downcast_ref::<WebError>().map(get_web_error_class))
+    .or_else(|| {
+      e.downcast_ref::<CompressionError>()
+        .map(get_web_compression_error_class)
+    })
+    .or_else(|| {
+      e.downcast_ref::<MessagePortError>()
+        .map(get_web_message_port_error_class)
+    })
+    .or_else(|| {
+      e.downcast_ref::<StreamResourceError>()
+        .map(get_web_stream_resource_error_class)
+    })
+    .or_else(|| e.downcast_ref::<BlobError>().map(get_web_blob_error_class))
     .or_else(|| e.downcast_ref::<IRError>().map(|_| "TypeError"))
     .or_else(|| e.downcast_ref::<ReprError>().map(get_ffi_repr_error_class))
    .or_else(|| {

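With `deno_web::get_error_class_name` gone, the runtime-level mapper now downcasts the concrete enums itself, as the chain above shows. A reduced sketch of the same downcast idiom against `anyhow::Error` (standing in for `AnyError`), for a single illustrative error type:

```rust
use thiserror::Error;

#[derive(Debug, Error)]
#[error("Blob part not found")]
pub struct BlobPartNotFound;

// Mirrors one link of the `.or_else(|| e.downcast_ref::<...>().map(...))` chain.
fn get_error_class_name(e: &anyhow::Error) -> Option<&'static str> {
  e.downcast_ref::<BlobPartNotFound>().map(|_| "TypeError")
}

fn main() {
  let err: anyhow::Error = BlobPartNotFound.into();
  assert_eq!(get_error_class_name(&err), Some("TypeError"));
}
```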
@@ -50,6 +50,7 @@ async fn op_worker_recv_message(
     .recv(state.clone())
     .or_cancel(handle.cancel)
     .await?
+    .map_err(|e| e.into())
 }
 
 #[op2(fast)]

@@ -134,7 +134,7 @@ pub fn op_worker_sync_fetch(
 
       let mime_type = mime_type_essence(&blob.media_type);
 
-      let body = blob.read_all().await?;
+      let body = blob.read_all().await;
 
       (Bytes::from(body), Some(mime_type), script)
     }

@@ -359,7 +359,7 @@ async fn op_host_recv_message(
       }
       Ok(ret)
     }
-    Ok(Err(err)) => Err(err),
+    Ok(Err(err)) => Err(err.into()),
     Err(_) => {
       // The worker was terminated.
       Ok(None)