
refactor: asynchronous blob backing store (#10969)

Co-authored-by: Luca Casonato <hello@lcas.dev>
Authored by Jimmy Wärting, 2021-07-05 15:34:37 +02:00; committed by GitHub.
parent ea87d860be
commit 2c0b0e45b7
28 changed files with 652 additions and 269 deletions
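At its core, this change replaces the old BlobUrlStore (a HashMap<Url, Blob> owning raw bytes) with a BlobStore in which a Blob is a list of reference-counted parts behind the new async BlobPart trait. A minimal sketch of the new API, using the deno_web types introduced below; the standalone main function and the tokio runtime are assumptions of the sketch, not part of the commit:

use deno_web::{BlobStore, InMemoryBlobPart};

#[tokio::main]
async fn main() -> Result<(), deno_core::error::AnyError> {
  let store = BlobStore::default();

  // Parts live behind Arc<Box<dyn BlobPart + Send + Sync>> and are addressed
  // by UUID instead of being owned outright by a blob URL map entry.
  let id =
    store.insert_part(Box::new(InMemoryBlobPart::from(b"hello".to_vec())));

  // Reading is now asynchronous (#[async_trait] on BlobPart), which is what
  // lets FileFetcher::fetch_blob_url below become an async fn.
  let part = store.get_part(&id).expect("part was just inserted");
  assert_eq!(part.read().await?, b"hello");
  Ok(())
}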

Cargo.lock

@@ -794,6 +794,7 @@ dependencies = [
 name = "deno_web"
 version = "0.41.1"
 dependencies = [
+ "async-trait",
 "base64 0.13.0",
 "deno_core",
 "encoding_rs",


@@ -19,7 +19,7 @@ use deno_core::futures;
 use deno_core::futures::future::FutureExt;
 use deno_core::ModuleSpecifier;
 use deno_runtime::deno_fetch::reqwest;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::Permissions;
 use log::debug;
 use log::info;
@@ -212,7 +212,7 @@ pub struct FileFetcher {
   cache_setting: CacheSetting,
   http_cache: HttpCache,
   http_client: reqwest::Client,
-  blob_url_store: BlobUrlStore,
+  blob_store: BlobStore,
 }

 impl FileFetcher {
@@ -221,7 +221,7 @@ impl FileFetcher {
     cache_setting: CacheSetting,
     allow_remote: bool,
     ca_data: Option<Vec<u8>>,
-    blob_url_store: BlobUrlStore,
+    blob_store: BlobStore,
   ) -> Result<Self, AnyError> {
     Ok(Self {
       auth_tokens: AuthTokens::new(env::var(DENO_AUTH_TOKENS).ok()),
@@ -230,7 +230,7 @@ impl FileFetcher {
       cache_setting,
       http_cache,
       http_client: create_http_client(get_user_agent(), ca_data)?,
-      blob_url_store,
+      blob_store,
     })
   }
@@ -360,7 +360,7 @@ impl FileFetcher {
   }

   /// Get a blob URL.
-  fn fetch_blob_url(
+  async fn fetch_blob_url(
     &self,
     specifier: &ModuleSpecifier,
   ) -> Result<File, AnyError> {
@@ -381,20 +381,24 @@ impl FileFetcher {
       ));
     }
-    let blob_url_storage = self.blob_url_store.borrow();
-    let blob = blob_url_storage.get(specifier.clone())?.ok_or_else(|| {
+    let blob = {
+      let blob_store = self.blob_store.borrow();
+      blob_store
+        .get_object_url(specifier.clone())?
+        .ok_or_else(|| {
           custom_error(
             "NotFound",
             format!("Blob URL not found: \"{}\".", specifier),
           )
-    })?;
+        })?
+    };
-    let content_type = blob.media_type;
+    let content_type = blob.media_type.clone();
+    let bytes = blob.read_all().await?;
     let (media_type, maybe_charset) =
       map_content_type(specifier, Some(content_type.clone()));
-    let source =
-      strip_shebang(get_source_from_bytes(blob.data, maybe_charset)?);
+    let source = strip_shebang(get_source_from_bytes(bytes, maybe_charset)?);
     let local =
       self
@@ -525,7 +529,7 @@ impl FileFetcher {
     }
     result
   } else if scheme == "blob" {
-    let result = self.fetch_blob_url(specifier);
+    let result = self.fetch_blob_url(specifier).await;
     if let Ok(file) = &result {
       self.cache.insert(specifier.clone(), file.clone());
     }
@@ -580,6 +584,7 @@ mod tests {
   use deno_core::resolve_url;
   use deno_core::resolve_url_or_path;
   use deno_runtime::deno_web::Blob;
+  use deno_runtime::deno_web::InMemoryBlobPart;
   use std::rc::Rc;
   use tempfile::TempDir;
@@ -588,28 +593,28 @@ mod tests {
     maybe_temp_dir: Option<Rc<TempDir>>,
   ) -> (FileFetcher, Rc<TempDir>) {
     let (file_fetcher, temp_dir, _) =
-      setup_with_blob_url_store(cache_setting, maybe_temp_dir);
+      setup_with_blob_store(cache_setting, maybe_temp_dir);
     (file_fetcher, temp_dir)
   }

-  fn setup_with_blob_url_store(
+  fn setup_with_blob_store(
     cache_setting: CacheSetting,
     maybe_temp_dir: Option<Rc<TempDir>>,
-  ) -> (FileFetcher, Rc<TempDir>, BlobUrlStore) {
+  ) -> (FileFetcher, Rc<TempDir>, BlobStore) {
     let temp_dir = maybe_temp_dir.unwrap_or_else(|| {
       Rc::new(TempDir::new().expect("failed to create temp directory"))
     });
     let location = temp_dir.path().join("deps");
-    let blob_url_store = BlobUrlStore::default();
+    let blob_store = BlobStore::default();
     let file_fetcher = FileFetcher::new(
       HttpCache::new(&location),
       cache_setting,
       true,
       None,
-      blob_url_store.clone(),
+      blob_store.clone(),
     )
     .expect("setup failed");
-    (file_fetcher, temp_dir, blob_url_store)
+    (file_fetcher, temp_dir, blob_store)
   }
   macro_rules! file_url {
@@ -948,16 +953,18 @@ mod tests {
   #[tokio::test]
   async fn test_fetch_blob_url() {
-    let (file_fetcher, _, blob_url_store) =
-      setup_with_blob_url_store(CacheSetting::Use, None);
-    let specifier = blob_url_store.insert(
-      Blob {
-        data:
-          "export const a = \"a\";\n\nexport enum A {\n  A,\n  B,\n  C,\n}\n"
-            .as_bytes()
-            .to_vec(),
+    let (file_fetcher, _, blob_store) =
+      setup_with_blob_store(CacheSetting::Use, None);
+    let bytes =
+      "export const a = \"a\";\n\nexport enum A {\n  A,\n  B,\n  C,\n}\n"
+        .as_bytes()
+        .to_vec();
+    let specifier = blob_store.insert_object_url(
+      Blob {
         media_type: "application/typescript".to_string(),
+        parts: vec![Arc::new(Box::new(InMemoryBlobPart::from(bytes)))],
       },
       None,
     );
@@ -1049,7 +1056,7 @@ mod tests {
       CacheSetting::ReloadAll,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("setup failed");
     let result = file_fetcher
@@ -1076,7 +1083,7 @@ mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let specifier =
@@ -1104,7 +1111,7 @@ mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let result = file_fetcher_02
@@ -1265,7 +1272,7 @@ mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let specifier =
@@ -1296,7 +1303,7 @@ mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let result = file_fetcher_02
@@ -1406,7 +1413,7 @@ mod tests {
       CacheSetting::Use,
       false,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let specifier =
@@ -1433,7 +1440,7 @@ mod tests {
       CacheSetting::Only,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let file_fetcher_02 = FileFetcher::new(
@@ -1441,7 +1448,7 @@ mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not create file fetcher");
     let specifier =
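The updated test_fetch_blob_url test above exercises the new object-URL flow end to end; condensed to just the store interaction, it looks like the sketch below (the standalone main function and tokio runtime are assumptions of the sketch, not commit code):

use deno_web::{Blob, BlobStore, InMemoryBlobPart};
use std::sync::Arc;

#[tokio::main]
async fn main() -> Result<(), deno_core::error::AnyError> {
  let store = BlobStore::default();
  let blob = Blob {
    media_type: "application/typescript".to_string(),
    parts: vec![Arc::new(Box::new(InMemoryBlobPart::from(
      b"export const a = 1;".to_vec(),
    )))],
  };

  // No location is given, so the URL gets a null origin: blob:null/<uuid>.
  let specifier = store.insert_object_url(blob, None);

  // The fetcher side: borrow the blob from the store, then read all of its
  // parts asynchronously (this is why fetch_blob_url became an async fn).
  let blob = store.get_object_url(specifier)?.expect("blob was registered");
  assert_eq!(blob.read_all().await?, b"export const a = 1;");
  Ok(())
}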


@@ -26,7 +26,7 @@ use deno_core::serde_json::json;
 use deno_core::url::Position;
 use deno_core::url::Url;
 use deno_core::ModuleSpecifier;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::Permissions;
 use log::error;
 use lspower::lsp;
@@ -264,7 +264,7 @@ impl Default for ModuleRegistry {
       cache_setting,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .unwrap();
@@ -283,7 +283,7 @@ impl ModuleRegistry {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .context("Error creating file fetcher in module registry.")
     .unwrap();


@@ -121,7 +121,7 @@ fn create_web_worker_callback(
     ts_version: version::TYPESCRIPT.to_string(),
     no_color: !colors::use_color(),
     get_error_class_fn: Some(&crate::errors::get_error_class_name),
-    blob_url_store: program_state.blob_url_store.clone(),
+    blob_store: program_state.blob_store.clone(),
     broadcast_channel: program_state.broadcast_channel.clone(),
   };
@@ -207,7 +207,7 @@ pub fn create_main_worker(
         .join("location_data")
         .join(checksum::gen(&[loc.to_string().as_bytes()]))
     }),
-    blob_url_store: program_state.blob_url_store.clone(),
+    blob_store: program_state.blob_store.clone(),
     broadcast_channel: program_state.broadcast_channel.clone(),
   };


@@ -16,7 +16,7 @@ use crate::source_maps::SourceMapGetter;
 use crate::specifier_handler::FetchHandler;
 use crate::version;
 use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
 use deno_runtime::inspector_server::InspectorServer;
 use deno_runtime::permissions::Permissions;
@@ -53,7 +53,7 @@ pub struct ProgramState {
   pub maybe_import_map: Option<ImportMap>,
   pub maybe_inspector_server: Option<Arc<InspectorServer>>,
   pub ca_data: Option<Vec<u8>>,
-  pub blob_url_store: BlobUrlStore,
+  pub blob_store: BlobStore,
   pub broadcast_channel: InMemoryBroadcastChannel,
 }
@@ -79,7 +79,7 @@ impl ProgramState {
       CacheSetting::Use
     };

-    let blob_url_store = BlobUrlStore::default();
+    let blob_store = BlobStore::default();
     let broadcast_channel = InMemoryBroadcastChannel::default();

     let file_fetcher = FileFetcher::new(
@@ -87,7 +87,7 @@ impl ProgramState {
       cache_usage,
       !flags.no_remote,
       ca_data.clone(),
-      blob_url_store.clone(),
+      blob_store.clone(),
     )?;

     let lockfile = if let Some(filename) = &flags.lock {
@@ -146,7 +146,7 @@ impl ProgramState {
       maybe_import_map,
       maybe_inspector_server,
       ca_data,
-      blob_url_store,
+      blob_store,
       broadcast_channel,
     };
     Ok(Arc::new(program_state))


@@ -574,7 +574,7 @@ pub mod tests {
   use crate::file_fetcher::CacheSetting;
   use crate::http_cache::HttpCache;
   use deno_core::resolve_url_or_path;
-  use deno_runtime::deno_web::BlobUrlStore;
+  use deno_runtime::deno_web::BlobStore;
   use tempfile::TempDir;

   macro_rules! map (
@@ -599,7 +599,7 @@ pub mod tests {
       CacheSetting::Use,
       true,
       None,
-      BlobUrlStore::default(),
+      BlobStore::default(),
     )
     .expect("could not setup");
     let disk_cache = deno_dir.gen_cache;


@@ -24,7 +24,7 @@ use deno_core::ModuleLoader;
 use deno_core::ModuleSpecifier;
 use deno_core::OpState;
 use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::Permissions;
 use deno_runtime::permissions::PermissionsOptions;
 use deno_runtime::worker::MainWorker;
@@ -213,7 +213,7 @@ pub async fn run(
   let main_module = resolve_url(SPECIFIER)?;
   let program_state = ProgramState::build(flags).await?;
   let permissions = Permissions::from_options(&metadata.permissions);
-  let blob_url_store = BlobUrlStore::default();
+  let blob_store = BlobStore::default();
   let broadcast_channel = InMemoryBroadcastChannel::default();
   let module_loader = Rc::new(EmbeddedModuleLoader(source_code));
   let create_web_worker_cb = Arc::new(|_| {
@@ -246,7 +246,7 @@ pub async fn run(
     get_error_class_fn: Some(&get_error_class_name),
     location: metadata.location,
     origin_storage_dir: None,
-    blob_url_store,
+    blob_store,
     broadcast_channel,
   };
   let mut worker =


@@ -0,0 +1,11 @@
// This test creates 1024 blobs of 128 MB each. This will only work if the blobs
// and their backing data is GCed as expected.
for (let i = 0; i < 1024; i++) {
// Create a 128MB byte array, and then a blob from it.
const buf = new Uint8Array(128 * 1024 * 1024);
new Blob([buf]);
// It is very important that there is a yield here, otherwise the finalizer
// for the blob is not called and the memory is not freed.
await new Promise((resolve) => setTimeout(resolve, 0));
}
console.log("GCed all blobs");


@@ -0,0 +1 @@
GCed all blobs
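The test above depends on the FinalizationRegistry added in 02_file.js further down: when a BlobReference is garbage collected, op_blob_remove_part drops the backing part from the store. A sketch of that removal path, calling the store directly rather than going through the op (the main function is illustrative only):

use deno_web::{BlobStore, InMemoryBlobPart};

fn main() {
  let store = BlobStore::default();

  // A 128 MB part, like each iteration of the test allocates.
  let part = InMemoryBlobPart::from(vec![0u8; 128 * 1024 * 1024]);
  let id = store.insert_part(Box::new(part));
  assert!(store.get_part(&id).is_some());

  // What op_blob_remove_part does once the JS finalizer fires: the entry is
  // removed, and dropping the last Arc frees the backing buffer.
  store.remove_part(&id);
  assert!(store.get_part(&id).is_none());
}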


@@ -366,6 +366,12 @@ itest!(js_import_detect {
   exit_code: 0,
 });

+itest!(blob_gc_finalization {
+  args: "run blob_gc_finalization.js",
+  output: "blob_gc_finalization.js.out",
+  exit_code: 0,
+});
+
 itest!(lock_write_requires_lock {
   args: "run --lock-write some_file.ts",
   output: "lock_write_requires_lock.out",


@@ -821,7 +821,9 @@ unitTest(function responseRedirect(): void {
 unitTest(async function responseWithoutBody(): Promise<void> {
   const response = new Response();
   assertEquals(await response.arrayBuffer(), new ArrayBuffer(0));
-  assertEquals(await response.blob(), new Blob([]));
+  const blob = await response.blob();
+  assertEquals(blob.size, 0);
+  assertEquals(await blob.arrayBuffer(), new ArrayBuffer(0));
   assertEquals(await response.text(), "");
   await assertThrowsAsync(async () => {
     await response.json();


@@ -34,7 +34,8 @@ unitTest(async function responseBlob() {
   assert(blobPromise instanceof Promise);
   const blob = await blobPromise;
   assert(blob instanceof Blob);
-  assertEquals(blob, new Blob([new Uint8Array([1, 2, 3])]));
+  assertEquals(blob.size, 3);
+  assertEquals(await blob.arrayBuffer(), new Uint8Array([1, 2, 3]).buffer);
 });

 // TODO(lucacasonato): re-enable test once #10002 is fixed.


@@ -152,16 +152,19 @@
     if (req.body !== null) {
       if (req.body.streamOrStatic instanceof ReadableStream) {
-        if (req.body.length === null) {
+        if (req.body.length === null || req.body.source instanceof Blob) {
           reqBody = req.body.stream;
         } else {
           const reader = req.body.stream.getReader();
           const r1 = await reader.read();
-          if (r1.done) throw new TypeError("Unreachable");
-          reqBody = r1.value;
-          const r2 = await reader.read();
-          if (!r2.done) throw new TypeError("Unreachable");
+          if (r1.done) {
+            reqBody = new Uint8Array(0);
+          } else {
+            reqBody = r1.value;
+            const r2 = await reader.read();
+            if (!r2.done) throw new TypeError("Unreachable");
+          }
         }
       } else {
         req.body.streamOrStatic.consumed = true;
         reqBody = req.body.streamOrStatic.body;
@@ -174,6 +177,7 @@
       headers: req.headerList,
       clientRid: req.clientRid,
       hasBody: reqBody !== null,
+      bodyLength: req.body?.length,
     }, reqBody instanceof Uint8Array ? reqBody : null);

     function onAbort() {


@@ -26,7 +26,8 @@ use deno_core::ResourceId;
 use deno_core::ZeroCopyBuf;

 use data_url::DataUrl;
-use deno_web::BlobUrlStore;
+use deno_web::BlobStore;
+use http::header::CONTENT_LENGTH;
 use reqwest::header::HeaderMap;
 use reqwest::header::HeaderName;
 use reqwest::header::HeaderValue;
@@ -130,6 +131,7 @@ pub struct FetchArgs {
   headers: Vec<(ByteString, ByteString)>,
   client_rid: Option<u32>,
   has_body: bool,
+  body_length: Option<u64>,
 }

 #[derive(Serialize)]
@@ -176,6 +178,14 @@ where
       None => {
         // If no body is passed, we return a writer for streaming the body.
         let (tx, rx) = mpsc::channel::<std::io::Result<Vec<u8>>>(1);
+
+        // If the size of the body is known, we include a content-length
+        // header explicitly.
+        if let Some(body_size) = args.body_length {
+          request =
+            request.header(CONTENT_LENGTH, HeaderValue::from(body_size))
+        }
+
         request = request.body(Body::wrap_stream(ReceiverStream::new(rx)));

         let request_body_rid =
@@ -207,7 +217,13 @@ where
       let cancel_handle = CancelHandle::new_rc();
       let cancel_handle_ = cancel_handle.clone();

-      let fut = async move { request.send().or_cancel(cancel_handle_).await };
+      let fut = async move {
+        request
+          .send()
+          .or_cancel(cancel_handle_)
+          .await
+          .map(|res| res.map_err(|err| type_error(err.to_string())))
+      };

       let request_rid = state
         .resource_table
@@ -240,32 +256,49 @@ where
       (request_rid, None, None)
     }
     "blob" => {
-      let blob_url_storage =
-        state.try_borrow::<BlobUrlStore>().ok_or_else(|| {
-          type_error("Blob URLs are not supported in this context.")
-        })?;
+      let blob_store = state.try_borrow::<BlobStore>().ok_or_else(|| {
+        type_error("Blob URLs are not supported in this context.")
+      })?;

-      let blob = blob_url_storage
-        .get(url)?
+      let blob = blob_store
+        .get_object_url(url)?
         .ok_or_else(|| type_error("Blob for the given URL not found."))?;

       if method != "GET" {
         return Err(type_error("Blob URL fetch only supports GET method."));
       }

-      let response = http::Response::builder()
-        .status(http::StatusCode::OK)
-        .header(http::header::CONTENT_LENGTH, blob.data.len())
-        .header(http::header::CONTENT_TYPE, blob.media_type)
-        .body(reqwest::Body::from(blob.data))?;
+      let cancel_handle = CancelHandle::new_rc();
+      let cancel_handle_ = cancel_handle.clone();

-      let fut = async move { Ok(Ok(Response::from(response))) };
+      let fut = async move {
+        // TODO(lucacsonato): this should be a stream!
+        let chunk = match blob.read_all().or_cancel(cancel_handle_).await? {
+          Ok(chunk) => chunk,
+          Err(err) => return Ok(Err(err)),
+        };
+
+        let res = http::Response::builder()
+          .status(http::StatusCode::OK)
+          .header(http::header::CONTENT_LENGTH, chunk.len())
+          .header(http::header::CONTENT_TYPE, blob.media_type.clone())
+          .body(reqwest::Body::from(chunk))
+          .map_err(|err| type_error(err.to_string()));
+
+        match res {
+          Ok(response) => Ok(Ok(Response::from(response))),
+          Err(err) => Ok(Err(err)),
+        }
+      };

       let request_rid = state
         .resource_table
         .add(FetchRequestResource(Box::pin(fut)));

-      (request_rid, None, None)
+      let cancel_handle_rid =
+        state.resource_table.add(FetchCancelHandle(cancel_handle));
+
+      (request_rid, None, Some(cancel_handle_rid))
     }
     _ => return Err(type_error(format!("scheme '{}' not supported", scheme))),
   };
@@ -382,8 +415,7 @@ pub async fn op_fetch_response_read(
   Ok(read)
 }

-type CancelableResponseResult =
-  Result<Result<Response, reqwest::Error>, Canceled>;
+type CancelableResponseResult = Result<Result<Response, AnyError>, Canceled>;

 struct FetchRequestResource(
   Pin<Box<dyn Future<Output = CancelableResponseResult>>>,


@@ -140,7 +140,10 @@
     if (innerResp.body !== null) {
       if (innerResp.body.unusable()) throw new TypeError("Body is unusable.");
       if (innerResp.body.streamOrStatic instanceof ReadableStream) {
-        if (innerResp.body.length === null) {
+        if (
+          innerResp.body.length === null ||
+          innerResp.body.source instanceof Blob
+        ) {
           respBody = innerResp.body.stream;
         } else {
           const reader = innerResp.body.stream.getReader();


@@ -3,13 +3,13 @@ use deno_core::Extension;
 use deno_bench_util::bench_or_profile;
 use deno_bench_util::bencher::{benchmark_group, Bencher};
 use deno_bench_util::{bench_js_async, bench_js_sync};
-use deno_web::BlobUrlStore;
+use deno_web::BlobStore;

 fn setup() -> Vec<Extension> {
   vec![
     deno_webidl::init(),
     deno_url::init(),
-    deno_web::init(BlobUrlStore::default(), None),
+    deno_web::init(BlobStore::default(), None),
     deno_timers::init::<deno_timers::NoTimersPermission>(),
     Extension::builder()
       .js(vec![


@@ -67,22 +67,11 @@
     return result;
   }

-  /**
-   * @param {...Uint8Array} bytesArrays
-   * @returns {Uint8Array}
-   */
-  function concatUint8Arrays(...bytesArrays) {
-    let byteLength = 0;
-    for (const bytes of bytesArrays) {
-      byteLength += bytes.byteLength;
-    }
-    const finalBytes = new Uint8Array(byteLength);
-    let current = 0;
-    for (const bytes of bytesArrays) {
-      finalBytes.set(bytes, current);
-      current += bytes.byteLength;
-    }
-    return finalBytes;
+  /** @param {(BlobReference | Blob)[]} parts */
+  async function* toIterator(parts) {
+    for (const part of parts) {
+      yield* part.stream();
+    }
   }

   /** @typedef {BufferSource | Blob | string} BlobPart */
@@ -90,35 +79,39 @@
   /**
    * @param {BlobPart[]} parts
    * @param {string} endings
-   * @returns {Uint8Array}
+   * @returns {{ parts: (BlobReference|Blob)[], size: number }}
    */
   function processBlobParts(parts, endings) {
-    /** @type {Uint8Array[]} */
-    const bytesArrays = [];
+    /** @type {(BlobReference|Blob)[]} */
+    const processedParts = [];
+    let size = 0;
     for (const element of parts) {
       if (element instanceof ArrayBuffer) {
-        bytesArrays.push(new Uint8Array(element.slice(0)));
+        const chunk = new Uint8Array(element.slice(0));
+        processedParts.push(BlobReference.fromUint8Array(chunk));
+        size += element.byteLength;
       } else if (ArrayBuffer.isView(element)) {
-        const buffer = element.buffer.slice(
-          element.byteOffset,
-          element.byteOffset + element.byteLength,
-        );
-        bytesArrays.push(new Uint8Array(buffer));
+        const chunk = new Uint8Array(
+          element.buffer,
+          element.byteOffset,
+          element.byteLength,
+        );
+        size += element.byteLength;
+        processedParts.push(BlobReference.fromUint8Array(chunk));
       } else if (element instanceof Blob) {
-        bytesArrays.push(
-          new Uint8Array(element[_byteSequence].buffer.slice(0)),
-        );
+        processedParts.push(element);
+        size += element.size;
       } else if (typeof element === "string") {
-        let s = element;
-        if (endings == "native") {
-          s = convertLineEndingsToNative(s);
-        }
-        bytesArrays.push(core.encode(s));
+        const chunk = core.encode(
+          endings == "native" ? convertLineEndingsToNative(element) : element,
+        );
+        size += chunk.byteLength;
+        processedParts.push(BlobReference.fromUint8Array(chunk));
       } else {
-        throw new TypeError("Unreachable code (invalild element type)");
+        throw new TypeError("Unreachable code (invalid element type)");
       }
     }
-    return concatUint8Arrays(...bytesArrays);
+    return { parts: processedParts, size };
   }
   /**
@@ -133,18 +126,30 @@
     return normalizedType.toLowerCase();
   }

-  const _byteSequence = Symbol("[[ByteSequence]]");
-
-  class Blob {
-    get [Symbol.toStringTag]() {
-      return "Blob";
-    }
+  /**
+   * Get all Parts as a flat array containing all references
+   * @param {Blob} blob
+   * @param {string[]} bag
+   * @returns {string[]}
+   */
+  function getParts(blob, bag = []) {
+    for (const part of blob[_parts]) {
+      if (part instanceof Blob) {
+        getParts(part, bag);
+      } else {
+        bag.push(part._id);
+      }
+    }
+    return bag;
+  }

-    /** @type {string} */
-    #type;
-    /** @type {Uint8Array} */
-    [_byteSequence];
+  const _size = Symbol("Size");
+  const _parts = Symbol("Parts");
+
+  class Blob {
+    #type = "";
+    [_size] = 0;
+    [_parts];

     /**
      * @param {BlobPart[]} blobParts
@@ -163,18 +168,20 @@
       this[webidl.brand] = webidl.brand;

-      /** @type {Uint8Array} */
-      this[_byteSequence] = processBlobParts(
+      const { parts, size } = processBlobParts(
         blobParts,
         options.endings,
       );
+      this[_parts] = parts;
+      this[_size] = size;
       this.#type = normalizeType(options.type);
     }

     /** @returns {number} */
     get size() {
       webidl.assertBranded(this, Blob);
-      return this[_byteSequence].byteLength;
+      return this[_size];
     }
     /** @returns {string} */
@@ -237,6 +244,36 @@
         relativeEnd = Math.min(end, O.size);
       }
     }

+    const span = Math.max(relativeEnd - relativeStart, 0);
+
+    const blobParts = [];
+    let added = 0;
+
+    for (const part of this[_parts]) {
+      // don't add the overflow to new blobParts
+      if (added >= span) {
+        // Could maybe be possible to remove variable `added`
+        // and only use relativeEnd?
+        break;
+      }
+      const size = part.size;
+      if (relativeStart && size <= relativeStart) {
+        // Skip the beginning and change the relative
+        // start & end position as we skip the unwanted parts
+        relativeStart -= size;
+        relativeEnd -= size;
+      } else {
+        const chunk = part.slice(
+          relativeStart,
+          Math.min(part.size, relativeEnd),
+        );
+        added += chunk.size;
+        relativeEnd -= part.size;
+        blobParts.push(chunk);
+        relativeStart = 0; // All next sequential parts should start at 0
+      }
+    }
+
     /** @type {string} */
     let relativeContentType;
     if (contentType === undefined) {
@@ -244,9 +281,11 @@
     } else {
       relativeContentType = normalizeType(contentType);
     }
-    return new Blob([
-      O[_byteSequence].buffer.slice(relativeStart, relativeEnd),
-    ], { type: relativeContentType });
+
+    const blob = new Blob([], { type: relativeContentType });
+    blob[_parts] = blobParts;
+    blob[_size] = span;
+    return blob;
   }

   /**
@@ -254,14 +293,18 @@
    */
   stream() {
     webidl.assertBranded(this, Blob);
-    const bytes = this[_byteSequence];
+    const partIterator = toIterator(this[_parts]);
     const stream = new ReadableStream({
       type: "bytes",
       /** @param {ReadableByteStreamController} controller */
-      start(controller) {
-        const chunk = new Uint8Array(bytes.buffer.slice(0));
-        if (chunk.byteLength > 0) controller.enqueue(chunk);
-        controller.close();
+      async pull(controller) {
+        while (true) {
+          const { value, done } = await partIterator.next();
+          if (done) return controller.close();
+          if (value.byteLength > 0) {
+            return controller.enqueue(value);
+          }
+        }
       },
     });
     return stream;
@@ -282,12 +325,22 @@
   async arrayBuffer() {
     webidl.assertBranded(this, Blob);
     const stream = this.stream();
-    let bytes = new Uint8Array();
+    const bytes = new Uint8Array(this.size);
+    let offset = 0;
     for await (const chunk of stream) {
-      bytes = concatUint8Arrays(bytes, chunk);
+      bytes.set(chunk, offset);
+      offset += chunk.byteLength;
     }
     return bytes.buffer;
   }
+
+  get [Symbol.toStringTag]() {
+    return "Blob";
+  }
+
+  [Symbol.for("Deno.customInspect")](inspect) {
+    return `Blob ${inspect({ size: this.size, type: this.#type })}`;
+  }
 }

 webidl.configurePrototype(Blob);
@@ -333,17 +386,13 @@
   );

   const _Name = Symbol("[[Name]]");
-  const _LastModfied = Symbol("[[LastModified]]");
+  const _LastModified = Symbol("[[LastModified]]");

   class File extends Blob {
-    get [Symbol.toStringTag]() {
-      return "File";
-    }
-
     /** @type {string} */
     [_Name];
     /** @type {number} */
-    [_LastModfied];
+    [_LastModified];

     /**
      * @param {BlobPart[]} fileBits
@@ -373,10 +422,10 @@
       this[_Name] = fileName;
       if (options.lastModified === undefined) {
         /** @type {number} */
-        this[_LastModfied] = new Date().getTime();
+        this[_LastModified] = new Date().getTime();
       } else {
         /** @type {number} */
-        this[_LastModfied] = options.lastModified;
+        this[_LastModified] = options.lastModified;
       }
     }
@@ -389,7 +438,11 @@
     /** @returns {number} */
     get lastModified() {
       webidl.assertBranded(this, File);
-      return this[_LastModfied];
+      return this[_LastModified];
+    }
+
+    get [Symbol.toStringTag]() {
+      return "File";
     }
   }
@@ -406,9 +459,80 @@
     ],
   );

+  // A finalization registry to deallocate a blob part when its JS reference is
+  // garbage collected.
+  const registry = new FinalizationRegistry((uuid) => {
+    core.opSync("op_blob_remove_part", uuid);
+  });
+
+  // TODO(lucacasonato): get a better stream from Rust in BlobReference#stream
+
+  /**
+   * An opaque reference to a blob part in Rust. This could be backed by a file,
+   * in memory storage, or something else.
+   */
+  class BlobReference {
+    /**
+     * Don't use directly. Use `BlobReference.fromUint8Array`.
+     * @param {string} id
+     * @param {number} size
+     */
+    constructor(id, size) {
+      this._id = id;
+      this.size = size;
+      registry.register(this, id);
+    }
+
+    /**
+     * Create a new blob part from a Uint8Array.
+     *
+     * @param {Uint8Array} data
+     * @returns {BlobReference}
+     */
+    static fromUint8Array(data) {
+      const id = core.opSync("op_blob_create_part", data);
+      return new BlobReference(id, data.byteLength);
+    }
+
+    /**
+     * Create a new BlobReference by slicing this BlobReference. This is a copy
+     * free operation - the sliced reference will still reference the original
+     * underlying bytes.
+     *
+     * @param {number} start
+     * @param {number} end
+     * @returns {BlobReference}
+     */
+    slice(start, end) {
+      const size = end - start;
+      const id = core.opSync("op_blob_slice_part", this._id, {
+        start,
+        len: size,
+      });
+      return new BlobReference(id, size);
+    }
+
+    /**
+     * Read the entire contents of the reference blob.
+     * @returns {AsyncGenerator<Uint8Array>}
+     */
+    async *stream() {
+      yield core.opAsync("op_blob_read_part", this._id);
+
+      // let position = 0;
+      // const end = this.size;
+      // while (position !== end) {
+      //   const size = Math.min(end - position, 65536);
+      //   const chunk = this.slice(position, position + size);
+      //   position += chunk.size;
+      //   yield core.opAsync("op_blob_read_part", chunk._id);
+      // }
+    }
+  }
+
   window.__bootstrap.file = {
+    getParts,
     Blob,
-    _byteSequence,
     File,
   };
 })(this);
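Blob.prototype.slice above no longer copies bytes: it walks this[_parts] and collects copy-free sub-references. A sketch of that walk in Rust, over plain part sizes rather than the real classes (slice_parts is this sketch's own name), showing which (part index, offset, length) triples a slice selects:

/// Mirror of the part-walking loop in the new Blob.prototype.slice: given
/// part sizes and a clamped [start, end) byte range, return the
/// (part_index, offset_into_part, len) triples the sliced blob references.
fn slice_parts(
  sizes: &[usize],
  start: usize,
  end: usize,
) -> Vec<(usize, usize, usize)> {
  let span = end.saturating_sub(start);
  let (mut rel_start, mut rel_end) = (start, end);
  let mut added = 0;
  let mut out = Vec::new();
  for (i, &size) in sizes.iter().enumerate() {
    if added >= span {
      break; // don't add the overflow to the new blob
    }
    if rel_start > 0 && size <= rel_start {
      // Skip this whole part and shift the window left past it.
      rel_start -= size;
      rel_end -= size;
    } else {
      let len = size.min(rel_end) - rel_start;
      out.push((i, rel_start, len));
      added += len;
      rel_end = rel_end.saturating_sub(size);
      rel_start = 0; // all following parts are taken from their start
    }
  }
  out
}

fn main() {
  // Three 4-byte parts, slicing bytes [2, 9): 2 bytes of part 0, all of
  // part 1, and 1 byte of part 2 - no bytes are copied to get there.
  assert_eq!(
    slice_parts(&[4, 4, 4], 2, 9),
    vec![(0, 2, 2), (1, 0, 4), (2, 0, 1)]
  );
}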


@@ -15,7 +15,7 @@
 ((window) => {
   const core = Deno.core;
   const webidl = window.__bootstrap.webidl;
-  const { _byteSequence } = window.__bootstrap.file;
+  const { getParts } = window.__bootstrap.file;
   const { URL } = window.__bootstrap.url;

   /**
@@ -31,9 +31,9 @@
     });

     const url = core.opSync(
-      "op_file_create_object_url",
+      "op_blob_create_object_url",
       blob.type,
-      blob[_byteSequence],
+      getParts(blob),
     );

     return url;
@@ -51,10 +51,7 @@
       prefix,
     });

-    core.opSync(
-      "op_file_revoke_object_url",
-      url,
-    );
+    core.opSync("op_blob_revoke_object_url", url);
   }

   URL.createObjectURL = createObjectURL;


@@ -154,6 +154,10 @@
       this[_id] = null;
     }
   }
+
+  get [Symbol.toStringTag]() {
+    return "MessagePort";
+  }
 }

 defineEventHandler(MessagePort.prototype, "message", function (self) {


@@ -14,6 +14,7 @@ repository = "https://github.com/denoland/deno"
 path = "lib.rs"

 [dependencies]
+async-trait = "0.1.50"
 base64 = "0.13.0"
 deno_core = { version = "0.92.0", path = "../../core" }
 encoding_rs = "0.8.28"

extensions/web/blob.rs

@@ -0,0 +1,265 @@
use async_trait::async_trait;
use deno_core::error::type_error;
use deno_core::url::Url;
use deno_core::ZeroCopyBuf;
use serde::Deserialize;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::Debug;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::Mutex;

use deno_core::error::AnyError;

use uuid::Uuid;

use crate::Location;

pub type PartMap = HashMap<Uuid, Arc<Box<dyn BlobPart + Send + Sync>>>;

#[derive(Clone, Default, Debug)]
pub struct BlobStore {
  parts: Arc<Mutex<PartMap>>,
  object_urls: Arc<Mutex<HashMap<Url, Arc<Blob>>>>,
}

impl BlobStore {
  pub fn insert_part(&self, part: Box<dyn BlobPart + Send + Sync>) -> Uuid {
    let id = Uuid::new_v4();
    let mut parts = self.parts.lock().unwrap();
    parts.insert(id, Arc::new(part));
    id
  }

  pub fn get_part(
    &self,
    id: &Uuid,
  ) -> Option<Arc<Box<dyn BlobPart + Send + Sync>>> {
    let parts = self.parts.lock().unwrap();
    let part = parts.get(&id);
    part.cloned()
  }

  pub fn remove_part(
    &self,
    id: &Uuid,
  ) -> Option<Arc<Box<dyn BlobPart + Send + Sync>>> {
    let mut parts = self.parts.lock().unwrap();
    parts.remove(&id)
  }

  pub fn get_object_url(
    &self,
    mut url: Url,
  ) -> Result<Option<Arc<Blob>>, AnyError> {
    let blob_store = self.object_urls.lock().unwrap();
    url.set_fragment(None);
    Ok(blob_store.get(&url).cloned())
  }

  pub fn insert_object_url(
    &self,
    blob: Blob,
    maybe_location: Option<Url>,
  ) -> Url {
    let origin = if let Some(location) = maybe_location {
      location.origin().ascii_serialization()
    } else {
      "null".to_string()
    };
    let id = Uuid::new_v4();
    let url = Url::parse(&format!("blob:{}/{}", origin, id)).unwrap();

    let mut blob_store = self.object_urls.lock().unwrap();
    blob_store.insert(url.clone(), Arc::new(blob));

    url
  }

  pub fn remove_object_url(&self, url: &Url) {
    let mut blob_store = self.object_urls.lock().unwrap();
    blob_store.remove(&url);
  }
}

#[derive(Debug)]
pub struct Blob {
  pub media_type: String,

  pub parts: Vec<Arc<Box<dyn BlobPart + Send + Sync>>>,
}

impl Blob {
  // TODO(lucacsonato): this should be a stream!
  pub async fn read_all(&self) -> Result<Vec<u8>, AnyError> {
    let size = self.size();
    let mut bytes = Vec::with_capacity(size);

    for part in &self.parts {
      let chunk = part.read().await?;
      bytes.extend_from_slice(chunk);
    }

    assert_eq!(bytes.len(), size);

    Ok(bytes)
  }

  fn size(&self) -> usize {
    let mut total = 0;
    for part in &self.parts {
      total += part.size()
    }
    total
  }
}

#[async_trait]
pub trait BlobPart: Debug {
  // TODO(lucacsonato): this should be a stream!
  async fn read(&self) -> Result<&[u8], AnyError>;
  fn size(&self) -> usize;
}

#[derive(Debug)]
pub struct InMemoryBlobPart(Vec<u8>);

impl From<Vec<u8>> for InMemoryBlobPart {
  fn from(vec: Vec<u8>) -> Self {
    Self(vec)
  }
}

#[async_trait]
impl BlobPart for InMemoryBlobPart {
  async fn read(&self) -> Result<&[u8], AnyError> {
    Ok(&self.0)
  }

  fn size(&self) -> usize {
    self.0.len()
  }
}

#[derive(Debug)]
pub struct SlicedBlobPart {
  part: Arc<Box<dyn BlobPart + Send + Sync>>,
  start: usize,
  len: usize,
}

#[async_trait]
impl BlobPart for SlicedBlobPart {
  async fn read(&self) -> Result<&[u8], AnyError> {
    let original = self.part.read().await?;
    Ok(&original[self.start..self.start + self.len])
  }

  fn size(&self) -> usize {
    self.len
  }
}

pub fn op_blob_create_part(
  state: &mut deno_core::OpState,
  data: ZeroCopyBuf,
  _: (),
) -> Result<Uuid, AnyError> {
  let blob_store = state.borrow::<BlobStore>();
  let part = InMemoryBlobPart(data.to_vec());
  let id = blob_store.insert_part(Box::new(part));
  Ok(id)
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SliceOptions {
  start: usize,
  len: usize,
}

pub fn op_blob_slice_part(
  state: &mut deno_core::OpState,
  id: Uuid,
  options: SliceOptions,
) -> Result<Uuid, AnyError> {
  let blob_store = state.borrow::<BlobStore>();
  let part = blob_store
    .get_part(&id)
    .ok_or_else(|| type_error("Blob part not found"))?;

  let SliceOptions { start, len } = options;

  let size = part.size();
  if start + len > size {
    return Err(type_error(
      "start + len can not be larger than blob part size",
    ));
  }

  let sliced_part = SlicedBlobPart { part, start, len };
  let id = blob_store.insert_part(Box::new(sliced_part));

  Ok(id)
}

pub async fn op_blob_read_part(
  state: Rc<RefCell<deno_core::OpState>>,
  id: Uuid,
  _: (),
) -> Result<ZeroCopyBuf, AnyError> {
  let part = {
    let state = state.borrow();
    let blob_store = state.borrow::<BlobStore>();
    blob_store.get_part(&id)
  }
  .ok_or_else(|| type_error("Blob part not found"))?;
  let buf = part.read().await?;
  Ok(ZeroCopyBuf::from(buf.to_vec()))
}

pub fn op_blob_remove_part(
  state: &mut deno_core::OpState,
  id: Uuid,
  _: (),
) -> Result<(), AnyError> {
  let blob_store = state.borrow::<BlobStore>();
  blob_store.remove_part(&id);
  Ok(())
}

pub fn op_blob_create_object_url(
  state: &mut deno_core::OpState,
  media_type: String,
  part_ids: Vec<Uuid>,
) -> Result<String, AnyError> {
  let mut parts = Vec::with_capacity(part_ids.len());
  let blob_store = state.borrow::<BlobStore>();
  for part_id in part_ids {
    let part = blob_store
      .get_part(&part_id)
      .ok_or_else(|| type_error("Blob part not found"))?;
    parts.push(part);
  }

  let blob = Blob { media_type, parts };

  let maybe_location = state.try_borrow::<Location>();
  let blob_store = state.borrow::<BlobStore>();

  let url = blob_store
    .insert_object_url(blob, maybe_location.map(|location| location.0.clone()));

  Ok(url.to_string())
}

pub fn op_blob_revoke_object_url(
  state: &mut deno_core::OpState,
  url: String,
  _: (),
) -> Result<(), AnyError> {
  let url = Url::parse(&url)?;
  let blob_store = state.borrow::<BlobStore>();
  blob_store.remove_object_url(&url);
  Ok(())
}
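BlobPart is the extension point here; as the BlobReference docs above put it, a part "could be backed by a file, in memory storage, or something else." A hypothetical sketch of a file-backed part follows. FileBlobPart is not in this commit; it assumes tokio, and caches the bytes on first read because BlobPart::read hands out a borrowed slice:

use async_trait::async_trait;
use deno_core::error::AnyError;
use deno_web::BlobPart;
use std::path::PathBuf;
use tokio::sync::OnceCell;

/// Hypothetical file-backed part (illustration only; not part of this
/// commit).
#[derive(Debug)]
pub struct FileBlobPart {
  path: PathBuf,
  size: usize,
  // read() must return &[u8], so the first read caches the file contents.
  bytes: OnceCell<Vec<u8>>,
}

#[async_trait]
impl BlobPart for FileBlobPart {
  async fn read(&self) -> Result<&[u8], AnyError> {
    let bytes = self
      .bytes
      .get_or_try_init(|| async {
        Ok::<_, AnyError>(tokio::fs::read(&self.path).await?)
      })
      .await?;
    Ok(bytes)
  }

  fn size(&self) -> usize {
    self.size
  }
}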


@@ -73,13 +73,9 @@ declare namespace globalThis {
   };

   declare var file: {
-    Blob: typeof Blob & {
-      [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
-    };
-    readonly _byteSequence: unique symbol;
-    File: typeof File & {
-      [globalThis.__bootstrap.file._byteSequence]: Uint8Array;
-    };
+    getParts(blob: Blob): string[];
+    Blob: typeof Blob;
+    File: typeof File;
   };

   declare var streams: {


@@ -1,13 +1,9 @@
 // Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

+mod blob;
 mod message_port;

-pub use crate::message_port::create_entangled_message_port;
-pub use crate::message_port::JsMessageData;
-pub use crate::message_port::MessagePort;
-
 use deno_core::error::bad_resource_id;
-use deno_core::error::null_opbuf;
 use deno_core::error::range_error;
 use deno_core::error::type_error;
 use deno_core::error::AnyError;
@@ -16,7 +12,6 @@ use deno_core::op_async;
 use deno_core::op_sync;
 use deno_core::url::Url;
 use deno_core::Extension;
-use deno_core::ModuleSpecifier;
 use deno_core::OpState;
 use deno_core::Resource;
 use deno_core::ResourceId;
@@ -29,23 +24,30 @@ use serde::Deserialize;
 use serde::Serialize;
 use std::borrow::Cow;
 use std::cell::RefCell;
-use std::collections::HashMap;
 use std::fmt;
 use std::path::PathBuf;
-use std::sync::Arc;
-use std::sync::Mutex;
 use std::usize;
-use uuid::Uuid;

+use crate::blob::op_blob_create_object_url;
+use crate::blob::op_blob_create_part;
+use crate::blob::op_blob_read_part;
+use crate::blob::op_blob_remove_part;
+use crate::blob::op_blob_revoke_object_url;
+use crate::blob::op_blob_slice_part;
+pub use crate::blob::Blob;
+pub use crate::blob::BlobPart;
+pub use crate::blob::BlobStore;
+pub use crate::blob::InMemoryBlobPart;
+pub use crate::message_port::create_entangled_message_port;
 use crate::message_port::op_message_port_create_entangled;
 use crate::message_port::op_message_port_post_message;
 use crate::message_port::op_message_port_recv_message;
+pub use crate::message_port::JsMessageData;
+pub use crate::message_port::MessagePort;

 /// Load and execute the javascript code.
-pub fn init(
-  blob_url_store: BlobUrlStore,
-  maybe_location: Option<Url>,
-) -> Extension {
+pub fn init(blob_store: BlobStore, maybe_location: Option<Url>) -> Extension {
   Extension::builder()
     .js(include_js_files!(
       prefix "deno:extensions/web",
@@ -75,13 +77,17 @@ pub fn init(
       ("op_encoding_new_decoder", op_sync(op_encoding_new_decoder)),
       ("op_encoding_decode", op_sync(op_encoding_decode)),
       ("op_encoding_encode_into", op_sync(op_encoding_encode_into)),
+      ("op_blob_create_part", op_sync(op_blob_create_part)),
+      ("op_blob_slice_part", op_sync(op_blob_slice_part)),
+      ("op_blob_read_part", op_async(op_blob_read_part)),
+      ("op_blob_remove_part", op_sync(op_blob_remove_part)),
       (
-        "op_file_create_object_url",
-        op_sync(op_file_create_object_url),
+        "op_blob_create_object_url",
+        op_sync(op_blob_create_object_url),
       ),
       (
-        "op_file_revoke_object_url",
-        op_sync(op_file_revoke_object_url),
+        "op_blob_revoke_object_url",
+        op_sync(op_blob_revoke_object_url),
       ),
       (
         "op_message_port_create_entangled",
@@ -97,7 +103,7 @@ pub fn init(
       ),
     ])
     .state(move |state| {
-      state.put(blob_url_store.clone());
+      state.put(blob_store.clone());
       if let Some(location) = maybe_location.clone() {
         state.put(Location(location));
       }
@@ -381,73 +387,4 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
       .map(|_| "DOMExceptionInvalidCharacterError")
     })
 }

-#[derive(Debug, Clone)]
-pub struct Blob {
-  pub data: Vec<u8>,
-  pub media_type: String,
-}
-
 pub struct Location(pub Url);

-#[derive(Debug, Default, Clone)]
-pub struct BlobUrlStore(Arc<Mutex<HashMap<Url, Blob>>>);
-
-impl BlobUrlStore {
-  pub fn get(&self, mut url: Url) -> Result<Option<Blob>, AnyError> {
-    let blob_store = self.0.lock().unwrap();
-    url.set_fragment(None);
-    Ok(blob_store.get(&url).cloned())
-  }
-
-  pub fn insert(&self, blob: Blob, maybe_location: Option<Url>) -> Url {
-    let origin = if let Some(location) = maybe_location {
-      location.origin().ascii_serialization()
-    } else {
-      "null".to_string()
-    };
-    let id = Uuid::new_v4();
-    let url = Url::parse(&format!("blob:{}/{}", origin, id)).unwrap();
-
-    let mut blob_store = self.0.lock().unwrap();
-    blob_store.insert(url.clone(), blob);
-
-    url
-  }
-
-  pub fn remove(&self, url: &ModuleSpecifier) {
-    let mut blob_store = self.0.lock().unwrap();
-    blob_store.remove(&url);
-  }
-}
-
-pub fn op_file_create_object_url(
-  state: &mut deno_core::OpState,
-  media_type: String,
-  zero_copy: Option<ZeroCopyBuf>,
-) -> Result<String, AnyError> {
-  let data = zero_copy.ok_or_else(null_opbuf)?;
-  let blob = Blob {
-    data: data.to_vec(),
-    media_type,
-  };
-
-  let maybe_location = state.try_borrow::<Location>();
-  let blob_store = state.borrow::<BlobUrlStore>();
-
-  let url =
-    blob_store.insert(blob, maybe_location.map(|location| location.0.clone()));
-
-  Ok(url.to_string())
-}
-
-pub fn op_file_revoke_object_url(
-  state: &mut deno_core::OpState,
-  url: String,
-  _: (),
-) -> Result<(), AnyError> {
-  let url = Url::parse(&url)?;
-  let blob_store = state.borrow::<BlobUrlStore>();
-  blob_store.remove(&url);
-  Ok(())
-}


@@ -41,7 +41,7 @@ fn create_runtime_snapshot(snapshot_path: &Path, files: Vec<PathBuf>) {
     deno_webidl::init(),
     deno_console::init(),
     deno_url::init(),
-    deno_web::init(Default::default(), Default::default()),
+    deno_web::init(deno_web::BlobStore::default(), Default::default()),
     deno_fetch::init::<deno_fetch::NoFetchPermissions>(
       "".to_owned(),
       None,


@@ -3,7 +3,7 @@
 use deno_core::error::AnyError;
 use deno_core::FsModuleLoader;
 use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
-use deno_runtime::deno_web::BlobUrlStore;
+use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::Permissions;
 use deno_runtime::worker::MainWorker;
 use deno_runtime::worker::WorkerOptions;
@@ -41,7 +41,7 @@ async fn main() -> Result<(), AnyError> {
     get_error_class_fn: Some(&get_error_class_name),
     location: None,
     origin_storage_dir: None,
-    blob_url_store: BlobUrlStore::default(),
+    blob_store: BlobStore::default(),
     broadcast_channel: InMemoryBroadcastChannel::default(),
   };


@@ -29,7 +29,7 @@ use deno_core::ModuleLoader;
 use deno_core::ModuleSpecifier;
 use deno_core::RuntimeOptions;
 use deno_web::create_entangled_message_port;
-use deno_web::BlobUrlStore;
+use deno_web::BlobStore;
 use deno_web::MessagePort;
 use log::debug;
 use std::cell::RefCell;
@@ -267,7 +267,7 @@ pub struct WebWorkerOptions {
   /// Sets `Deno.noColor` in JS runtime.
   pub no_color: bool,
   pub get_error_class_fn: Option<GetErrorClassFn>,
-  pub blob_url_store: BlobUrlStore,
+  pub blob_store: BlobStore,
   pub broadcast_channel: InMemoryBroadcastChannel,
 }
@@ -294,7 +294,7 @@ impl WebWorker {
       deno_webidl::init(),
       deno_console::init(),
       deno_url::init(),
-      deno_web::init(options.blob_url_store.clone(), Some(main_module.clone())),
+      deno_web::init(options.blob_store.clone(), Some(main_module.clone())),
       deno_fetch::init::<Permissions>(
         options.user_agent.clone(),
         options.ca_data.clone(),


@@ -22,7 +22,7 @@ use deno_core::ModuleId;
 use deno_core::ModuleLoader;
 use deno_core::ModuleSpecifier;
 use deno_core::RuntimeOptions;
-use deno_web::BlobUrlStore;
+use deno_web::BlobStore;
 use log::debug;
 use std::env;
 use std::pin::Pin;
@@ -68,7 +68,7 @@ pub struct WorkerOptions {
   pub get_error_class_fn: Option<GetErrorClassFn>,
   pub location: Option<Url>,
   pub origin_storage_dir: Option<std::path::PathBuf>,
-  pub blob_url_store: BlobUrlStore,
+  pub blob_store: BlobStore,
   pub broadcast_channel: InMemoryBroadcastChannel,
 }
@@ -94,7 +94,7 @@ impl MainWorker {
       deno_webidl::init(),
       deno_console::init(),
       deno_url::init(),
-      deno_web::init(options.blob_url_store.clone(), options.location.clone()),
+      deno_web::init(options.blob_store.clone(), options.location.clone()),
       deno_fetch::init::<Permissions>(
         options.user_agent.clone(),
         options.ca_data.clone(),
@@ -298,7 +298,7 @@ mod tests {
       get_error_class_fn: None,
       location: None,
       origin_storage_dir: None,
-      blob_url_store: BlobUrlStore::default(),
+      blob_store: BlobStore::default(),
       broadcast_channel: InMemoryBroadcastChannel::default(),
     };


@@ -986,16 +986,7 @@
       "filereader_readAsDataURL.any.html": true,
       "filereader_readAsText.any.html": true,
       "filereader_readystate.any.html": true,
-      "filereader_result.any.html": [
-        "result is null during \"loadstart\" event for readAsText",
-        "result is null during \"loadstart\" event for readAsDataURL",
-        "result is null during \"loadstart\" event for readAsArrayBuffer",
-        "result is null during \"loadstart\" event for readAsBinaryString",
-        "result is null during \"progress\" event for readAsText",
-        "result is null during \"progress\" event for readAsDataURL",
-        "result is null during \"progress\" event for readAsArrayBuffer",
-        "result is null during \"progress\" event for readAsBinaryString"
-      ]
+      "filereader_result.any.html": true
     }
   },
   "html": {