
fix(runtime): event loop panics in classic workers (#11756)

Classic worker scripts are now executed in the context of a Tokio
runtime, which means we can no longer spawn additional Tokio runtimes
directly in "op_worker_sync_fetch". Instead, we spawn a new thread
there, which creates its own Tokio runtime that we can use to block
the worker thread.
Luca Casonato authored 2021-08-18 15:19:22 +02:00 (committed by GitHub)
parent 480cfda8d5
commit a66218d457
6 changed files with 144 additions and 131 deletions
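
The fix boils down to one pattern: a thread that is already being driven by a
Tokio runtime must not block on a nested runtime, but a freshly spawned OS
thread can own its own runtime and block freely. A minimal, self-contained
sketch of that pattern follows (the function names and the simulated fetch are
illustrative stand-ins, not Deno's actual API):

// Cargo.toml (assumed): tokio = { version = "1", features = ["rt", "time"] }

use std::time::Duration;

/// Blocks the calling thread until all "scripts" have been fetched, without
/// ever blocking on a runtime from a thread that Tokio already drives.
fn fetch_scripts_blocking(scripts: Vec<String>) -> Vec<String> {
  // A plain OS thread: it is not managed by the caller's runtime.
  let thread = std::thread::spawn(move || {
    // The helper thread owns its own single-threaded runtime...
    let runtime = tokio::runtime::Builder::new_current_thread()
      .enable_time()
      .build()
      .unwrap();
    // ...so block_on here cannot collide with the worker's event loop.
    runtime.block_on(async move {
      let mut out = Vec::with_capacity(scripts.len());
      for script in scripts {
        // Stand-in for the real async fetch of a classic script.
        tokio::time::sleep(Duration::from_millis(10)).await;
        out.push(format!("// fetched {}", script));
      }
      out
    })
  });
  // The caller (the worker thread) blocks until the helper thread is done.
  thread.join().unwrap()
}

fn main() {
  // Simulate the worker thread, which already runs inside a Tokio runtime.
  let worker_rt = tokio::runtime::Builder::new_current_thread()
    .enable_time()
    .build()
    .unwrap();
  worker_rt.block_on(async {
    // Safe: the nested runtime lives on another thread. Building and blocking
    // on it from this thread instead would make Tokio panic.
    let fetched =
      fetch_scripts_blocking(vec!["https://example.com/a.js".to_string()]);
    println!("{:?}", fetched);
  });
}

The trade-off is one extra OS thread per synchronous fetch; a TODO in the diff
below notes that this thread could eventually become a reusable resource.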


@@ -1109,6 +1109,13 @@ itest!(import_file_with_colon {
   output: "import_file_with_colon.ts.out",
   http_server: true,
 });
+
+itest!(classic_workers_event_loop {
+  args:
+    "run --enable-testing-features-do-not-use classic_workers_event_loop.js",
+  output: "classic_workers_event_loop.js.out",
+});
+
 // FIXME(bartlomieju): disabled, because this test is very flaky on CI
 // itest!(local_sources_not_cached_in_memory {
 // args: "run --allow-read --allow-write no_mem_cache.js",


@@ -0,0 +1,4 @@
+new Worker(
+  "data:application/javascript,setTimeout(() => {console.log('done'); self.close()}, 1000)",
+  { type: "classic" },
+);


@@ -0,0 +1 @@
+done


@@ -38,118 +38,124 @@ pub fn op_worker_sync_fetch(
   let handle = state.borrow::<WebWorkerInternalHandle>().clone();
   assert_eq!(handle.worker_type, WebWorkerType::Classic);
 
-  // TODO(andreubotella) Make the runtime into a resource and add a new op to
-  // block on each request, so a script can run while the next loads.
-  let runtime = tokio::runtime::Builder::new_current_thread()
-    .enable_io()
-    .enable_time()
-    .build()
-    .unwrap();
-
   // TODO(andreubotella) It's not good to throw an exception related to blob
   // URLs when none of the script URLs use the blob scheme.
   // Also, in which contexts are blob URLs not supported?
-  let blob_store = state.try_borrow::<BlobStore>().ok_or_else(|| {
-    type_error("Blob URLs are not supported in this context.")
-  })?;
+  let blob_store = state
+    .try_borrow::<BlobStore>()
+    .ok_or_else(|| type_error("Blob URLs are not supported in this context."))?
+    .clone();
 
-  let handles: Vec<_> = scripts
-    .into_iter()
-    .map(|script| -> JoinHandle<Result<SyncFetchScript, AnyError>> {
-      let blob_store = blob_store.clone();
-      runtime.spawn(async move {
-        let script_url =
-          Url::parse(&script).map_err(|_| type_error("Invalid script URL"))?;
+  // TODO(andreubotella): make the below thread into a resource that can be
+  // re-used. This would allow parallel fecthing of multiple scripts.
+  let thread = std::thread::spawn(move || {
+    let runtime = tokio::runtime::Builder::new_current_thread()
+      .enable_io()
+      .enable_time()
+      .build()
+      .unwrap();
+
+    let handles: Vec<_> = scripts
+      .into_iter()
+      .map(|script| -> JoinHandle<Result<SyncFetchScript, AnyError>> {
+        let blob_store = blob_store.clone();
+        runtime.spawn(async move {
+          let script_url = Url::parse(&script)
+            .map_err(|_| type_error("Invalid script URL"))?;
 
-        let (body, mime_type, res_url) = match script_url.scheme() {
-          "http" | "https" => {
-            let resp = reqwest::get(script_url).await?.error_for_status()?;
+          let (body, mime_type, res_url) = match script_url.scheme() {
+            "http" | "https" => {
+              let resp = reqwest::get(script_url).await?.error_for_status()?;
 
-            let res_url = resp.url().to_string();
+              let res_url = resp.url().to_string();
 
-            // TODO(andreubotella) Properly run fetch's "extract a MIME type".
-            let mime_type = resp
-              .headers()
-              .get("Content-Type")
-              .and_then(|v| v.to_str().ok())
-              .map(mime_type_essence);
+              // TODO(andreubotella) Properly run fetch's "extract a MIME type".
+              let mime_type = resp
+                .headers()
+                .get("Content-Type")
+                .and_then(|v| v.to_str().ok())
+                .map(mime_type_essence);
 
-            // Always check the MIME type with HTTP(S).
-            loose_mime_checks = false;
+              // Always check the MIME type with HTTP(S).
+              loose_mime_checks = false;
 
-            let body = resp.bytes().await?;
+              let body = resp.bytes().await?;
 
-            (body, mime_type, res_url)
-          }
-          "data" => {
-            let data_url = DataUrl::process(&script)
-              .map_err(|e| type_error(format!("{:?}", e)))?;
+              (body, mime_type, res_url)
+            }
+            "data" => {
+              let data_url = DataUrl::process(&script)
+                .map_err(|e| type_error(format!("{:?}", e)))?;
 
-            let mime_type = {
-              let mime = data_url.mime_type();
-              format!("{}/{}", mime.type_, mime.subtype)
-            };
+              let mime_type = {
+                let mime = data_url.mime_type();
+                format!("{}/{}", mime.type_, mime.subtype)
+              };
 
-            let (body, _) = data_url
-              .decode_to_vec()
-              .map_err(|e| type_error(format!("{:?}", e)))?;
+              let (body, _) = data_url
+                .decode_to_vec()
+                .map_err(|e| type_error(format!("{:?}", e)))?;
 
-            (Bytes::from(body), Some(mime_type), script)
-          }
-          "blob" => {
-            let blob = blob_store
-              .get_object_url(script_url)?
-              .ok_or_else(|| type_error("Blob for the given URL not found."))?;
+              (Bytes::from(body), Some(mime_type), script)
+            }
+            "blob" => {
+              let blob =
+                blob_store.get_object_url(script_url)?.ok_or_else(|| {
+                  type_error("Blob for the given URL not found.")
+                })?;
 
-            let mime_type = mime_type_essence(&blob.media_type);
+              let mime_type = mime_type_essence(&blob.media_type);
 
-            let body = blob.read_all().await?;
+              let body = blob.read_all().await?;
 
-            (Bytes::from(body), Some(mime_type), script)
-          }
-          _ => {
-            return Err(type_error(format!(
-              "Classic scripts with scheme {}: are not supported in workers.",
-              script_url.scheme()
-            )))
-          }
-        };
+              (Bytes::from(body), Some(mime_type), script)
+            }
+            _ => {
+              return Err(type_error(format!(
+                "Classic scripts with scheme {}: are not supported in workers.",
+                script_url.scheme()
+              )))
+            }
+          };
 
-        if !loose_mime_checks {
-          // TODO(andreubotella) Check properly for a Javascript MIME type.
-          match mime_type.as_deref() {
-            Some("application/javascript" | "text/javascript") => {}
-            Some(mime_type) => {
-              return Err(
-                DomExceptionNetworkError {
-                  msg: format!("Invalid MIME type {:?}.", mime_type),
-                }
-                .into(),
-              )
-            }
-            None => {
-              return Err(
-                DomExceptionNetworkError::new("Missing MIME type.").into(),
-              )
-            }
-          }
-        }
+          if !loose_mime_checks {
+            // TODO(andreubotella) Check properly for a Javascript MIME type.
+            match mime_type.as_deref() {
+              Some("application/javascript" | "text/javascript") => {}
+              Some(mime_type) => {
+                return Err(
+                  DomExceptionNetworkError {
+                    msg: format!("Invalid MIME type {:?}.", mime_type),
+                  }
+                  .into(),
+                )
+              }
+              None => {
+                return Err(
+                  DomExceptionNetworkError::new("Missing MIME type.").into(),
+                )
+              }
+            }
+          }
 
-        let (text, _) = encoding_rs::UTF_8.decode_with_bom_removal(&body);
-        Ok(SyncFetchScript {
-          url: res_url,
-          script: text.into_owned(),
+          let (text, _) = encoding_rs::UTF_8.decode_with_bom_removal(&body);
+          Ok(SyncFetchScript {
+            url: res_url,
+            script: text.into_owned(),
+          })
         })
       })
-    })
-    .collect();
+      .collect();
 
-  let mut ret = Vec::with_capacity(handles.len());
-  for handle in handles {
-    let script = runtime.block_on(handle)??;
-    ret.push(script);
-  }
-  Ok(ret)
+    let mut ret = Vec::with_capacity(handles.len());
+    for handle in handles {
+      let script = runtime.block_on(handle)??;
+      ret.push(script);
+    }
+    Ok(ret)
+  });
+
+  thread.join().unwrap()
 }


@@ -11,7 +11,6 @@ use deno_core::error::AnyError;
 use deno_core::error::JsError;
 use deno_core::futures::channel::mpsc;
 use deno_core::futures::future::poll_fn;
-use deno_core::futures::future::FutureExt;
 use deno_core::futures::stream::StreamExt;
 use deno_core::located_script_name;
 use deno_core::serde::Deserialize;
@@ -568,36 +567,38 @@ pub fn run_web_worker(
   // TODO(bartlomieju): run following block using "select!"
   // with terminate
 
-  // Execute provided source code immediately
-  let result = if let Some(source_code) = maybe_source_code {
-    worker.execute_script(&located_script_name!(), &source_code)
-  } else {
-    // TODO(bartlomieju): add "type": "classic", ie. ability to load
-    // script instead of module
-    let load_future = worker.execute_module(&specifier).boxed_local();
-
-    rt.block_on(load_future)
-  };
+  let fut = async move {
+    // Execute provided source code immediately
+    let result = if let Some(source_code) = maybe_source_code {
+      worker.execute_script(&located_script_name!(), &source_code)
+    } else {
+      // TODO(bartlomieju): add "type": "classic", ie. ability to load
+      // script instead of module
+      worker.execute_module(&specifier).await
+    };
 
-  let internal_handle = worker.internal_handle.clone();
+    let internal_handle = worker.internal_handle.clone();
 
-  // If sender is closed it means that worker has already been closed from
-  // within using "globalThis.close()"
-  if internal_handle.is_terminated() {
-    return Ok(());
-  }
+    // If sender is closed it means that worker has already been closed from
+    // within using "globalThis.close()"
+    if internal_handle.is_terminated() {
+      return Ok(());
+    }
 
-  if let Err(e) = result {
-    print_worker_error(e.to_string(), &name);
-    internal_handle
-      .post_event(WorkerControlEvent::TerminalError(e))
-      .expect("Failed to post message to host");
+    if let Err(e) = result {
+      print_worker_error(e.to_string(), &name);
+      internal_handle
+        .post_event(WorkerControlEvent::TerminalError(e))
+        .expect("Failed to post message to host");
 
-    // Failure to execute script is a terminal error, bye, bye.
-    return Ok(());
-  }
+      // Failure to execute script is a terminal error, bye, bye.
+      return Ok(());
+    }
 
-  let result = rt.block_on(worker.run_event_loop(true));
-  debug!("Worker thread shuts down {}", &name);
-  result
+    let result = worker.run_event_loop(true).await;
+    debug!("Worker thread shuts down {}", &name);
+    result
+  };
+
+  rt.block_on(fut)
 }
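
The hunk above can be reduced to a standalone sketch: instead of entering the
runtime twice (once to load the module, once to run the event loop), both
phases are folded into a single future and block_on is called exactly once, so
any op that runs during loading already sees a live Tokio context. The function
names below are illustrative placeholders, not Deno's API:

// Cargo.toml (assumed): tokio = { version = "1", features = ["rt", "time"] }

use std::time::Duration;

async fn execute_script() -> Result<(), String> {
  // Stand-in for executing the worker's source code / module.
  Ok(())
}

async fn run_event_loop() -> Result<(), String> {
  // Stand-in for driving the worker's event loop to completion.
  tokio::time::sleep(Duration::from_millis(10)).await;
  Ok(())
}

fn main() -> Result<(), String> {
  let rt = tokio::runtime::Builder::new_current_thread()
    .enable_time()
    .build()
    .unwrap();

  // One future covers both phases; the runtime is entered exactly once.
  let fut = async move {
    execute_script().await?;
    run_event_loop().await
  };
  rt.block_on(fut)
}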


@@ -16744,13 +16744,7 @@
         "Cross-origin throw",
         "Redirect-to-cross-origin syntax error",
         "Redirect-to-Cross-origin throw"
-      ],
-      "report-error-cross-origin.sub.any.worker.html": false,
-      "report-error-redirect-to-cross-origin.sub.any.worker.html": false,
-      "report-error-same-origin.sub.any.worker.html": false,
-      "report-error-setTimeout-cross-origin.sub.any.worker.html": false,
-      "report-error-setTimeout-redirect-to-cross-origin.sub.any.worker.html": false,
-      "report-error-setTimeout-same-origin.sub.any.worker.html": false
+      ]
     }
   }
 },