1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-26 00:59:24 -05:00

Reland "fix(cli): don't store blob and data urls in the module cache" (#18581)

Relands #18261 now that
https://github.com/lucacasonato/esbuild_deno_loader/pull/54 is landed
and used by fresh.
Fixes #18260.
This commit is contained in:
Nayeem Rahman 2023-07-01 23:52:30 +01:00 committed by GitHub
parent 4e2f02639e
commit b9c0e7cd55
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
25 changed files with 142 additions and 184 deletions

View file

@ -339,7 +339,7 @@ fn create_cli_snapshot(snapshot_path: PathBuf) -> CreateSnapshotOutput {
deno_console::deno_console::init_ops(),
deno_url::deno_url::init_ops(),
deno_web::deno_web::init_ops::<PermissionsContainer>(
deno_web::BlobStore::default(),
Default::default(),
Default::default(),
),
deno_fetch::deno_fetch::init_ops::<PermissionsContainer>(Default::default()),

View file

@ -144,7 +144,7 @@ struct CliFactoryServices {
maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<BlobStore>,
blob_store: Deferred<Arc<BlobStore>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
@ -215,8 +215,8 @@ impl CliFactory {
})
}
pub fn blob_store(&self) -> &BlobStore {
self.services.blob_store.get_or_init(BlobStore::default)
pub fn blob_store(&self) -> &Arc<BlobStore> {
self.services.blob_store.get_or_init(Default::default)
}
pub fn root_cert_store_provider(&self) -> &Arc<dyn RootCertStoreProvider> {

View file

@ -33,7 +33,6 @@ use deno_runtime::deno_fetch::reqwest::StatusCode;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::PermissionsContainer;
use log::debug;
use std::borrow::Borrow;
use std::collections::HashMap;
use std::env;
use std::fs;
@ -50,10 +49,6 @@ pub const SUPPORTED_SCHEMES: [&str; 5] =
/// A structure representing a source file.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct File {
/// The path to the local version of the source file. For local files this
/// will be the direct path to that file. For remote files, it will be the
/// path to the file in the HTTP cache.
pub local: PathBuf,
/// For remote files, if there was an `X-TypeScript-Type` header, the parsed
/// out value of that header.
pub maybe_types: Option<String>,
@ -90,13 +85,12 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?;
let bytes = fs::read(&local)?;
let bytes = fs::read(local)?;
let charset = text_encoding::detect_charset(&bytes).to_string();
let source = get_source_from_bytes(bytes, Some(charset))?;
let media_type = MediaType::from_specifier(specifier);
Ok(File {
local,
maybe_types: None,
media_type,
source: source.into(),
@ -179,7 +173,7 @@ pub struct FileFetcher {
cache_setting: CacheSetting,
pub http_cache: HttpCache,
http_client: Arc<HttpClient>,
blob_store: BlobStore,
blob_store: Arc<BlobStore>,
download_log_level: log::Level,
progress_bar: Option<ProgressBar>,
}
@ -190,7 +184,7 @@ impl FileFetcher {
cache_setting: CacheSetting,
allow_remote: bool,
http_client: Arc<HttpClient>,
blob_store: BlobStore,
blob_store: Arc<BlobStore>,
progress_bar: Option<ProgressBar>,
) -> Self {
Self {
@ -218,13 +212,6 @@ impl FileFetcher {
bytes: Vec<u8>,
headers: &HashMap<String, String>,
) -> Result<File, AnyError> {
let local =
self
.http_cache
.get_cache_filename(specifier)
.ok_or_else(|| {
generic_error("Cannot convert specifier to cached filename.")
})?;
let maybe_content_type = headers.get("content-type");
let (media_type, maybe_charset) =
map_content_type(specifier, maybe_content_type);
@ -238,7 +225,6 @@ impl FileFetcher {
};
Ok(File {
local,
maybe_types,
media_type,
source: source.into(),
@ -290,39 +276,11 @@ impl FileFetcher {
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
debug!("FileFetcher::fetch_data_url() - specifier: {}", specifier);
match self.fetch_cached(specifier, 0) {
Ok(Some(file)) => return Ok(file),
Ok(None) => {}
Err(err) => return Err(err),
}
if self.cache_setting == CacheSetting::Only {
return Err(custom_error(
"NotCached",
format!(
"Specifier not found in cache: \"{specifier}\", --cached-only is specified."
),
));
}
let (source, content_type) = get_source_from_data_url(specifier)?;
let (media_type, _) = map_content_type(specifier, Some(&content_type));
let local =
self
.http_cache
.get_cache_filename(specifier)
.ok_or_else(|| {
generic_error("Cannot convert specifier to cached filename.")
})?;
let mut headers = HashMap::new();
headers.insert("content-type".to_string(), content_type);
self
.http_cache
.set(specifier, headers.clone(), source.as_bytes())?;
Ok(File {
local,
maybe_types: None,
media_type,
source: source.into(),
@ -337,32 +295,15 @@ impl FileFetcher {
specifier: &ModuleSpecifier,
) -> Result<File, AnyError> {
debug!("FileFetcher::fetch_blob_url() - specifier: {}", specifier);
match self.fetch_cached(specifier, 0) {
Ok(Some(file)) => return Ok(file),
Ok(None) => {}
Err(err) => return Err(err),
}
if self.cache_setting == CacheSetting::Only {
return Err(custom_error(
"NotCached",
format!(
"Specifier not found in cache: \"{specifier}\", --cached-only is specified."
),
));
}
let blob = {
let blob_store = self.blob_store.borrow();
blob_store
.get_object_url(specifier.clone())
.ok_or_else(|| {
custom_error(
"NotFound",
format!("Blob URL not found: \"{specifier}\"."),
)
})?
};
let blob = self
.blob_store
.get_object_url(specifier.clone())
.ok_or_else(|| {
custom_error(
"NotFound",
format!("Blob URL not found: \"{specifier}\"."),
)
})?;
let content_type = blob.media_type.clone();
let bytes = blob.read_all().await?;
@ -370,22 +311,10 @@ impl FileFetcher {
let (media_type, maybe_charset) =
map_content_type(specifier, Some(&content_type));
let source = get_source_from_bytes(bytes, maybe_charset)?;
let local =
self
.http_cache
.get_cache_filename(specifier)
.ok_or_else(|| {
generic_error("Cannot convert specifier to cached filename.")
})?;
let mut headers = HashMap::new();
headers.insert("content-type".to_string(), content_type);
self
.http_cache
.set(specifier, headers.clone(), source.as_bytes())?;
Ok(File {
local,
maybe_types: None,
media_type,
source: source.into(),
@ -562,17 +491,9 @@ impl FileFetcher {
// disk changing affecting things like workers and dynamic imports.
fetch_local(specifier)
} else if scheme == "data" {
let result = self.fetch_data_url(specifier);
if let Ok(file) = &result {
self.cache.insert(specifier.clone(), file.clone());
}
result
self.fetch_data_url(specifier)
} else if scheme == "blob" {
let result = self.fetch_blob_url(specifier).await;
if let Ok(file) = &result {
self.cache.insert(specifier.clone(), file.clone());
}
result
self.fetch_blob_url(specifier).await
} else if !self.allow_remote {
Err(custom_error(
"NoRemote",
@ -762,10 +683,10 @@ mod tests {
fn setup_with_blob_store(
cache_setting: CacheSetting,
maybe_temp_dir: Option<TempDir>,
) -> (FileFetcher, TempDir, BlobStore) {
) -> (FileFetcher, TempDir, Arc<BlobStore>) {
let temp_dir = maybe_temp_dir.unwrap_or_default();
let location = temp_dir.path().join("deps").to_path_buf();
let blob_store = BlobStore::default();
let blob_store: Arc<BlobStore> = Default::default();
let file_fetcher = FileFetcher::new(
HttpCache::new(location),
cache_setting,
@ -1035,7 +956,6 @@ mod tests {
let local = temp_dir.path().join("a.ts");
let specifier = ModuleSpecifier::from_file_path(&local).unwrap();
let file = File {
local: local.to_path_buf(),
maybe_types: None,
media_type: MediaType::TypeScript,
source: "some source code".into(),
@ -1206,7 +1126,7 @@ mod tests {
CacheSetting::ReloadAll,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let result = file_fetcher
@ -1231,7 +1151,7 @@ mod tests {
CacheSetting::Use,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let specifier =
@ -1256,7 +1176,7 @@ mod tests {
CacheSetting::Use,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let result = file_fetcher_02
@ -1397,7 +1317,7 @@ mod tests {
CacheSetting::Use,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let specifier =
@ -1425,7 +1345,7 @@ mod tests {
CacheSetting::Use,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let result = file_fetcher_02
@ -1524,7 +1444,7 @@ mod tests {
CacheSetting::Use,
false,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let specifier =
@ -1549,7 +1469,7 @@ mod tests {
CacheSetting::Only,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let file_fetcher_02 = FileFetcher::new(
@ -1557,7 +1477,7 @@ mod tests {
CacheSetting::Use,
true,
Arc::new(HttpClient::new(None, None)),
BlobStore::default(),
Default::default(),
None,
);
let specifier =

View file

@ -69,7 +69,10 @@ impl CacheMetadata {
&self,
specifier: &ModuleSpecifier,
) -> Option<Arc<HashMap<MetadataKey, String>>> {
if matches!(specifier.scheme(), "file" | "npm" | "node") {
if matches!(
specifier.scheme(),
"file" | "npm" | "node" | "data" | "blob"
) {
return None;
}
let version = self
@ -85,7 +88,10 @@ impl CacheMetadata {
}
fn refresh(&self, specifier: &ModuleSpecifier) -> Option<Metadata> {
if matches!(specifier.scheme(), "file" | "npm" | "node") {
if matches!(
specifier.scheme(),
"file" | "npm" | "node" | "data" | "blob"
) {
return None;
}
let cache_filename = self.cache.get_cache_filename(specifier)?;

View file

@ -721,10 +721,6 @@ pub enum DenoDiagnostic {
NoAssertType,
/// A remote module was not found in the cache.
NoCache(ModuleSpecifier),
/// A blob module was not found in the cache.
NoCacheBlob,
/// A data module was not found in the cache.
NoCacheData(ModuleSpecifier),
/// A remote npm package reference was not found in the cache.
NoCacheNpm(NpmPackageReqReference, ModuleSpecifier),
/// A local module was not found on the local file system.
@ -749,8 +745,6 @@ impl DenoDiagnostic {
Self::InvalidAssertType(_) => "invalid-assert-type",
Self::NoAssertType => "no-assert-type",
Self::NoCache(_) => "no-cache",
Self::NoCacheBlob => "no-cache-blob",
Self::NoCacheData(_) => "no-cache-data",
Self::NoCacheNpm(_, _) => "no-cache-npm",
Self::NoLocal(_) => "no-local",
Self::Redirect { .. } => "redirect",
@ -828,7 +822,7 @@ impl DenoDiagnostic {
}),
..Default::default()
},
"no-cache" | "no-cache-data" | "no-cache-npm" => {
"no-cache" | "no-cache-npm" => {
let data = diagnostic
.data
.clone()
@ -920,7 +914,6 @@ impl DenoDiagnostic {
"import-map-remap"
| "no-cache"
| "no-cache-npm"
| "no-cache-data"
| "no-assert-type"
| "redirect"
| "import-node-prefix-missing"
@ -939,8 +932,6 @@ impl DenoDiagnostic {
Self::InvalidAssertType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an assertion type of \"json\". Instead got \"{assert_type}\"."), None),
Self::NoAssertType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import assertion. Consider adding `assert { type: \"json\" }` to the import statement.".to_string(), None),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: \"{specifier}\"."), Some(json!({ "specifier": specifier }))),
Self::NoCacheBlob => (lsp::DiagnosticSeverity::ERROR, "Uncached blob URL.".to_string(), None),
Self::NoCacheData(specifier) => (lsp::DiagnosticSeverity::ERROR, "Uncached data URL.".to_string(), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_ref, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: \"{}\".", pkg_ref.req), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unable to load a local module: \"{specifier}\".\n Please check the file path."), None),
Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))),
@ -1043,8 +1034,6 @@ fn diagnose_resolution(
// about that.
let deno_diagnostic = match specifier.scheme() {
"file" => DenoDiagnostic::NoLocal(specifier.clone()),
"data" => DenoDiagnostic::NoCacheData(specifier.clone()),
"blob" => DenoDiagnostic::NoCacheBlob,
_ => DenoDiagnostic::NoCache(specifier.clone()),
};
diagnostics.push(deno_diagnostic);

View file

@ -13,6 +13,7 @@ use crate::cache::CachedUrlMetadata;
use crate::cache::FastInsecureHasher;
use crate::cache::HttpCache;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::get_source_from_data_url;
use crate::file_fetcher::map_content_type;
use crate::file_fetcher::SUPPORTED_SCHEMES;
use crate::lsp::logging::lsp_warn;
@ -728,8 +729,12 @@ impl FileSystemDocuments {
resolver: &dyn deno_graph::source::Resolver,
specifier: &ModuleSpecifier,
) -> Option<Document> {
let fs_version = get_document_path(cache, specifier)
.and_then(|path| calculate_fs_version(&path));
let fs_version = if specifier.scheme() == "data" {
Some("1".to_string())
} else {
get_document_path(cache, specifier)
.and_then(|path| calculate_fs_version(&path))
};
let file_system_doc = self.docs.get(specifier);
if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version {
// attempt to update the file on the file system
@ -747,10 +752,10 @@ impl FileSystemDocuments {
resolver: &dyn deno_graph::source::Resolver,
specifier: &ModuleSpecifier,
) -> Option<Document> {
let path = get_document_path(cache, specifier)?;
let fs_version = calculate_fs_version(&path)?;
let bytes = fs::read(path).ok()?;
let doc = if specifier.scheme() == "file" {
let path = get_document_path(cache, specifier)?;
let fs_version = calculate_fs_version(&path)?;
let bytes = fs::read(path).ok()?;
let maybe_charset =
Some(text_encoding::detect_charset(&bytes).to_string());
let content = get_source_from_bytes(bytes, maybe_charset).ok()?;
@ -761,7 +766,19 @@ impl FileSystemDocuments {
SourceTextInfo::from_string(content),
resolver,
)
} else if specifier.scheme() == "data" {
let (source, _) = get_source_from_data_url(specifier).ok()?;
Document::new(
specifier.clone(),
"1".to_string(),
None,
SourceTextInfo::from_string(source),
resolver,
)
} else {
let path = get_document_path(cache, specifier)?;
let fs_version = calculate_fs_version(&path)?;
let bytes = fs::read(path).ok()?;
let cache_filename = cache.get_cache_filename(specifier)?;
let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?;
let maybe_content_type = specifier_metadata.headers.get("content-type");
@ -787,7 +804,7 @@ fn get_document_path(
specifier: &ModuleSpecifier,
) -> Option<PathBuf> {
match specifier.scheme() {
"npm" | "node" => None,
"npm" | "node" | "data" | "blob" => None,
"file" => specifier_to_file_path(specifier).ok(),
_ => cache.get_cache_filename(specifier),
}
@ -970,6 +987,9 @@ impl Documents {
if self.open_docs.contains_key(&specifier) {
return true;
}
if specifier.scheme() == "data" {
return true;
}
if let Some(path) = get_document_path(&self.cache, &specifier) {
return path.is_file();
}

View file

@ -20,7 +20,6 @@ use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use import_map::ImportMap;
use log::error;
use serde_json::from_value;
@ -1053,7 +1052,7 @@ impl Inner {
cache_setting,
true,
self.http_client.clone(),
BlobStore::default(),
Default::default(),
None,
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());

View file

@ -29,7 +29,6 @@ use deno_core::url::Position;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use deno_graph::Dependency;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::PermissionsContainer;
use log::error;
use once_cell::sync::Lazy;
@ -439,7 +438,7 @@ impl ModuleRegistry {
CacheSetting::RespectHeaders,
true,
http_client,
BlobStore::default(),
Default::default(),
None,
);
file_fetcher.set_download_log_level(super::logging::lsp_log_level());

View file

@ -44,7 +44,6 @@ use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::permissions::Permissions;
use deno_runtime::permissions::PermissionsContainer;
use deno_runtime::WorkerLogLevel;
@ -422,7 +421,7 @@ pub async fn run(
npm_resolver.clone(),
node_resolver,
Box::new(StandaloneHasNodeSpecifierChecker),
BlobStore::default(),
Default::default(),
Box::new(module_loader_factory),
root_cert_store_provider,
fs,

View file

@ -5612,7 +5612,7 @@ fn lsp_cache_location() {
"text": "import * as a from \"http://127.0.0.1:4545/xTypeScriptTypes.js\";\n// @deno-types=\"http://127.0.0.1:4545/type_definitions/foo.d.ts\"\nimport * as b from \"http://127.0.0.1:4545/type_definitions/foo.js\";\nimport * as c from \"http://127.0.0.1:4545/subdir/type_reference.js\";\nimport * as d from \"http://127.0.0.1:4545/subdir/mod1.ts\";\nimport * as e from \"data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=\";\nimport * as f from \"./file_01.ts\";\nimport * as g from \"http://localhost:4545/x/a/mod.ts\";\n\nconsole.log(a, b, c, d, e, f, g);\n"
}
}));
assert_eq!(diagnostics.all().len(), 7);
assert_eq!(diagnostics.all().len(), 6);
client.write_request(
"deno/cache",
json!({
@ -5708,7 +5708,7 @@ fn lsp_tls_cert() {
}
}));
let diagnostics = diagnostics.all();
assert_eq!(diagnostics.len(), 7);
assert_eq!(diagnostics.len(), 6);
client.write_request(
"deno/cache",
json!({
@ -7609,25 +7609,9 @@ fn lsp_data_urls_with_jsx_compiler_option() {
}
})).all();
// data urls no longer need to be cached, so there should be no diagnostic
assert_eq!(diagnostics.len(), 1);
assert_eq!(
diagnostics[0].code,
Some(lsp::NumberOrString::String("no-cache-data".to_string()))
);
assert_eq!(diagnostics.len(), 0);
// so cache it
client.write_request(
"deno/cache",
json!({
"referrer": {
"uri": uri,
},
"uris": [],
}),
);
let res = client.write_request(
let res: Value = client.write_request(
"textDocument/references",
json!({
"textDocument": {

View file

@ -1255,6 +1255,47 @@ async fn test_watch_unload_handler_error_on_drop() {
check_alive_then_kill(child);
}
#[tokio::test]
// Regression test for not storing blob URLs in the on-disk module cache:
// a blob URL created during one watcher run must NOT be importable after
// the watcher restarts the process, since blob URLs only live in the
// in-memory BlobStore of the run that created them.
async fn run_watch_blob_urls_reset() {
let _g = util::http_server();
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
// The script persists its blob URL in localStorage on the first run and
// attempts to re-import that same URL on every subsequent run. The second
// import must fail (reject), proving the blob module was never cached.
let file_content = r#"
const prevUrl = localStorage.getItem("url");
if (prevUrl == null) {
console.log("first run, storing blob url");
const url = URL.createObjectURL(
new Blob(["export {}"], { type: "application/javascript" }),
);
await import(url); // this shouldn't insert into the fs module cache
localStorage.setItem("url", url);
} else {
await import(prevUrl)
.then(() => console.log("importing old blob url incorrectly works"))
.catch(() => console.log("importing old blob url correctly failed"));
}
"#;
file_to_watch.write(file_content);
// Run the script under `deno run --watch` and capture both output streams.
let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("run")
.arg("--watch")
.arg(&file_to_watch)
.env("NO_COLOR", "1")
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()
.unwrap();
let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
// First run: the script creates and stores the blob URL.
wait_contains("first run, storing blob url", &mut stdout_lines).await;
wait_contains("finished", &mut stderr_lines).await;
// Rewriting the file (same content) triggers a watcher restart; the
// restarted process must fail to import the stale blob URL.
file_to_watch.write(file_content);
wait_contains("importing old blob url correctly failed", &mut stdout_lines)
.await;
wait_contains("finished", &mut stderr_lines).await;
check_alive_then_kill(child);
}
#[cfg(unix)]
#[tokio::test]
async fn test_watch_sigint() {

View file

@ -18,7 +18,6 @@ use deno_core::resolve_url_or_path;
use deno_doc as doc;
use deno_graph::GraphKind;
use deno_graph::ModuleSpecifier;
use std::path::PathBuf;
pub async fn print_docs(
flags: Flags,
@ -76,7 +75,6 @@ pub async fn print_docs(
let root_specifier =
resolve_path("./$deno$doc.ts", cli_options.initial_cwd()).unwrap();
let root = File {
local: PathBuf::from("./$deno$doc.ts"),
maybe_types: None,
media_type: MediaType::TypeScript,
source: format!("export * from \"{module_specifier}\";").into(),

View file

@ -81,7 +81,6 @@ pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
std::io::stdin().read_to_end(&mut source)?;
// Create a dummy source file.
let source_file = File {
local: main_module.clone().to_file_path().unwrap(),
maybe_types: None,
media_type: MediaType::TypeScript,
source: String::from_utf8(source)?.into(),
@ -163,7 +162,6 @@ pub async fn eval_command(
.into_bytes();
let file = File {
local: main_module.clone().to_file_path().unwrap(),
maybe_types: None,
media_type: MediaType::Unknown,
source: String::from_utf8(source_code)?.into(),

View file

@ -1138,7 +1138,6 @@ fn extract_files_from_regex_blocks(
.unwrap_or(file_specifier);
Some(File {
local: file_specifier.to_file_path().unwrap(),
maybe_types: None,
media_type: file_media_type,
source: file_source.into(),

View file

@ -96,7 +96,7 @@ struct SharedWorkerState {
npm_resolver: Arc<CliNpmResolver>,
node_resolver: Arc<NodeResolver>,
has_node_specifier_checker: Box<dyn HasNodeSpecifierChecker>,
blob_store: BlobStore,
blob_store: Arc<BlobStore>,
broadcast_channel: InMemoryBroadcastChannel,
shared_array_buffer_store: SharedArrayBufferStore,
compiled_wasm_module_store: CompiledWasmModuleStore,
@ -311,7 +311,7 @@ impl CliMainWorkerFactory {
npm_resolver: Arc<CliNpmResolver>,
node_resolver: Arc<NodeResolver>,
has_node_specifier_checker: Box<dyn HasNodeSpecifierChecker>,
blob_store: BlobStore,
blob_store: Arc<BlobStore>,
module_loader_factory: Box<dyn ModuleLoaderFactory>,
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
fs: Arc<dyn deno_fs::FileSystem>,

View file

@ -8,7 +8,6 @@ use deno_core::Extension;
use deno_core::ExtensionFileSource;
use deno_core::ExtensionFileSourceCode;
use deno_core::OpState;
use deno_web::BlobStore;
#[derive(Clone)]
struct Permissions;
@ -28,7 +27,7 @@ fn setup() -> Vec<Extension> {
deno_url::deno_url::init_ops_and_esm(),
deno_console::deno_console::init_ops_and_esm(),
deno_web::deno_web::init_ops_and_esm::<Permissions>(
BlobStore::default(),
Default::default(),
None,
),
Extension::builder("bench_setup")

View file

@ -8,7 +8,6 @@ use deno_core::Extension;
use deno_core::ExtensionFileSource;
use deno_core::ExtensionFileSourceCode;
use deno_core::OpState;
use deno_web::BlobStore;
#[derive(Clone)]
struct Permissions;
@ -25,11 +24,11 @@ fn setup() -> Vec<Extension> {
deno_webidl::deno_webidl::init_ops_and_esm(),
deno_url::deno_url::init_ops_and_esm(),
deno_console::deno_console::init_ops_and_esm(),
deno_web::deno_web::init_ops_and_esm::<Permissions>(BlobStore::default(), None),
deno_web::deno_web::init_ops_and_esm::<Permissions>(Default::default(), None),
Extension::builder("bench_setup")
.esm(vec![
ExtensionFileSource {
specifier: "ext:bench_setup/setup",
specifier: "ext:bench_setup/setup",
code: ExtensionFileSourceCode::IncludedInBinary(r#"
import { setTimeout, handleTimerMacrotask } from "ext:deno_web/02_timers.js";
globalThis.setTimeout = setTimeout;

View file

@ -23,10 +23,10 @@ use crate::Location;
pub type PartMap = HashMap<Uuid, Arc<dyn BlobPart + Send + Sync>>;
#[derive(Clone, Default, Debug)]
#[derive(Default, Debug)]
pub struct BlobStore {
parts: Arc<Mutex<PartMap>>,
object_urls: Arc<Mutex<HashMap<Url, Arc<Blob>>>>,
parts: Mutex<PartMap>,
object_urls: Mutex<HashMap<Url, Arc<Blob>>>,
}
impl BlobStore {
@ -80,6 +80,11 @@ impl BlobStore {
let mut blob_store = self.object_urls.lock();
blob_store.remove(url);
}
pub fn clear(&self) {
self.parts.lock().clear();
self.object_urls.lock().clear();
}
}
#[derive(Debug)]
@ -162,7 +167,7 @@ impl BlobPart for SlicedBlobPart {
#[op]
pub fn op_blob_create_part(state: &mut OpState, data: JsBuffer) -> Uuid {
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
let part = InMemoryBlobPart(data.to_vec());
blob_store.insert_part(Arc::new(part))
}
@ -180,7 +185,7 @@ pub fn op_blob_slice_part(
id: Uuid,
options: SliceOptions,
) -> Result<Uuid, AnyError> {
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
let part = blob_store
.get_part(&id)
.ok_or_else(|| type_error("Blob part not found"))?;
@ -207,7 +212,7 @@ pub async fn op_blob_read_part(
) -> Result<ToJsBuffer, AnyError> {
let part = {
let state = state.borrow();
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.get_part(&id)
}
.ok_or_else(|| type_error("Blob part not found"))?;
@ -217,7 +222,7 @@ pub async fn op_blob_read_part(
#[op]
pub fn op_blob_remove_part(state: &mut OpState, id: Uuid) {
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.remove_part(&id);
}
@ -228,7 +233,7 @@ pub fn op_blob_create_object_url(
part_ids: Vec<Uuid>,
) -> Result<String, AnyError> {
let mut parts = Vec::with_capacity(part_ids.len());
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
for part_id in part_ids {
let part = blob_store
.get_part(&part_id)
@ -239,7 +244,7 @@ pub fn op_blob_create_object_url(
let blob = Blob { media_type, parts };
let maybe_location = state.try_borrow::<Location>();
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
let url = blob_store
.insert_object_url(blob, maybe_location.map(|location| location.0.clone()));
@ -253,7 +258,7 @@ pub fn op_blob_revoke_object_url(
url: &str,
) -> Result<(), AnyError> {
let url = Url::parse(url)?;
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.remove_object_url(&url);
Ok(())
}
@ -280,7 +285,7 @@ pub fn op_blob_from_object_url(
return Ok(None);
}
let blob_store = state.try_borrow::<BlobStore>().ok_or_else(|| {
let blob_store = state.try_borrow::<Arc<BlobStore>>().ok_or_else(|| {
type_error("Blob URLs are not supported in this context.")
})?;
if let Some(blob) = blob_store.get_object_url(url) {

View file

@ -29,6 +29,7 @@ use std::borrow::Cow;
use std::cell::RefCell;
use std::fmt;
use std::path::PathBuf;
use std::sync::Arc;
use std::usize;
use crate::blob::op_blob_create_object_url;
@ -110,7 +111,7 @@ deno_core::extension!(deno_web,
"15_performance.js",
],
options = {
blob_store: BlobStore,
blob_store: Arc<BlobStore>,
maybe_location: Option<Url>,
},
state = |state, options| {

View file

@ -51,7 +51,7 @@ impl op_blob_revoke_object_url {
#[allow(clippy::extra_unused_lifetimes)]
pub fn call<'scope>(state: &mut OpState, url: String) -> Result<(), AnyError> {
let url = Url::parse(&url)?;
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.remove_object_url(&url);
Ok(())
}

View file

@ -4,7 +4,7 @@ pub fn op_blob_revoke_object_url(
) -> Result<(), AnyError> {
// TODO(@littledivy): fast compatible https://github.com/denoland/deno/issues/17159
let url = Url::parse(&url)?;
let blob_store = state.borrow::<BlobStore>();
let blob_store = state.borrow::<Arc<BlobStore>>();
blob_store.remove_object_url(&url);
Ok(())
}

View file

@ -298,7 +298,7 @@ mod startup_snapshot {
deno_console::deno_console::init_ops_and_esm(),
deno_url::deno_url::init_ops_and_esm(),
deno_web::deno_web::init_ops_and_esm::<Permissions>(
deno_web::BlobStore::default(),
Default::default(),
Default::default(),
),
deno_fetch::deno_fetch::init_ops_and_esm::<Permissions>(

View file

@ -1,5 +1,7 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use crate::web_worker::WebWorkerInternalHandle;
use crate::web_worker::WebWorkerType;
use deno_core::error::type_error;
@ -46,7 +48,7 @@ pub fn op_worker_sync_fetch(
// URLs when none of the script URLs use the blob scheme.
// Also, in which contexts are blob URLs not supported?
let blob_store = state
.try_borrow::<BlobStore>()
.try_borrow::<Arc<BlobStore>>()
.ok_or_else(|| type_error("Blob URLs are not supported in this context."))?
.clone();

View file

@ -343,7 +343,7 @@ pub struct WebWorkerOptions {
pub worker_type: WebWorkerType,
pub maybe_inspector_server: Option<Arc<InspectorServer>>,
pub get_error_class_fn: Option<GetErrorClassFn>,
pub blob_store: BlobStore,
pub blob_store: Arc<BlobStore>,
pub broadcast_channel: InMemoryBroadcastChannel,
pub shared_array_buffer_store: Option<SharedArrayBufferStore>,
pub compiled_wasm_module_store: Option<CompiledWasmModuleStore>,

View file

@ -123,7 +123,7 @@ pub struct WorkerOptions {
pub get_error_class_fn: Option<GetErrorClassFn>,
pub cache_storage_dir: Option<std::path::PathBuf>,
pub origin_storage_dir: Option<std::path::PathBuf>,
pub blob_store: BlobStore,
pub blob_store: Arc<BlobStore>,
pub broadcast_channel: InMemoryBroadcastChannel,
/// The store to use for transferring SharedArrayBuffers between isolates.