mirror of
https://github.com/denoland/deno.git
synced 2024-12-18 13:22:55 -05:00
fix: clear dep analysis when module loading is done (#27204)
Closes https://github.com/denoland/deno/issues/26663
This commit is contained in:
parent
3bae68eda6
commit
ab4568a03d
4 changed files with 111 additions and 7 deletions
12
Cargo.lock
generated
12
Cargo.lock
generated
|
@ -1474,9 +1474,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_core"
|
||||
version = "0.323.0"
|
||||
version = "0.324.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a781bcfe1b5211b8497f45bf5b3dba73036b8d5d1533c1f05d26ccf0afb25a78"
|
||||
checksum = "24503eda646f246aa6eb0f794909f9a857c8f05095fed66f36e0eaef92edce23"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"az",
|
||||
|
@ -2042,9 +2042,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ops"
|
||||
version = "0.199.0"
|
||||
version = "0.200.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a24a1f3e22029a57d3094b32070b8328eac793920b5a022027d360f085e6b245"
|
||||
checksum = "03a529a2c488cd3042f12f35666569ebe5b3cf89d2b7d1cafc1a652f6d7bcc8f"
|
||||
dependencies = [
|
||||
"proc-macro-rules",
|
||||
"proc-macro2",
|
||||
|
@ -6684,9 +6684,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_v8"
|
||||
version = "0.232.0"
|
||||
version = "0.233.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c9feae92f7293fcc1a32a86be1a399859c0637e55dad8991d5258c43f7ff4d2"
|
||||
checksum = "307f176b7475480cee690c34c7118f96fe564d1f2a974bf990294b8310ae4983"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"serde",
|
||||
|
|
|
@ -48,7 +48,7 @@ repository = "https://github.com/denoland/deno"
|
|||
|
||||
[workspace.dependencies]
|
||||
deno_ast = { version = "=0.44.0", features = ["transpiling"] }
|
||||
deno_core = { version = "0.323.0" }
|
||||
deno_core = { version = "0.324.0" }
|
||||
|
||||
deno_bench_util = { version = "0.174.0", path = "./bench_util" }
|
||||
deno_config = { version = "=0.39.3", features = ["workspace", "sync"] }
|
||||
|
|
10
cli/cache/parsed_source.rs
vendored
10
cli/cache/parsed_source.rs
vendored
|
@ -95,11 +95,21 @@ impl ParsedSourceCache {
|
|||
self.sources.lock().remove(specifier);
|
||||
}
|
||||
|
||||
/// Frees all parsed sources from memory.
pub fn free_all(&self) {
  self.sources.lock().clear();
}
|
||||
|
||||
/// Creates a parser that will reuse a ParsedSource from the store
|
||||
/// if it exists, or else parse.
|
||||
pub fn as_capturing_parser(&self) -> CapturingEsParser {
|
||||
CapturingEsParser::new(None, self)
|
||||
}
|
||||
|
||||
/// Number of parsed sources currently held in memory (test-only helper).
#[cfg(test)]
pub fn len(&self) -> usize {
  let sources = self.sources.lock();
  sources.len()
}
|
||||
}
|
||||
|
||||
/// It's ok that this is racy since in non-LSP situations
|
||||
|
|
|
@ -7,6 +7,8 @@ use std::path::PathBuf;
|
|||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::str;
|
||||
use std::sync::atomic::AtomicU16;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::args::jsr_url;
|
||||
|
@ -49,6 +51,7 @@ use deno_core::error::generic_error;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future::FutureExt;
|
||||
use deno_core::futures::Future;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::resolve_url;
|
||||
use deno_core::ModuleCodeString;
|
||||
use deno_core::ModuleLoader;
|
||||
|
@ -222,6 +225,42 @@ struct SharedCliModuleLoaderState {
|
|||
npm_module_loader: NpmModuleLoader,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliResolver>,
|
||||
in_flight_loads_tracker: InFlightModuleLoadsTracker,
|
||||
}
|
||||
|
||||
/// Tracks how many module loads are currently in flight so the parsed-source
/// cache can be freed once loading settles (see `increase`/`decrease`).
struct InFlightModuleLoadsTracker {
  // Number of loads currently in progress.
  loads_number: Arc<AtomicU16>,
  // Delay in milliseconds before the cleanup task frees the cache
  // (passed to `Duration::from_millis` when the task is spawned).
  cleanup_task_timeout: u64,
  // Handle to the pending cleanup task, if one has been scheduled.
  cleanup_task_handle: Arc<Mutex<Option<tokio::task::JoinHandle<()>>>>,
}
|
||||
|
||||
impl InFlightModuleLoadsTracker {
  /// Records that a module load has started and cancels any pending
  /// cache-cleanup task so the cache isn't freed while a load is in flight.
  pub fn increase(&self) {
    self.loads_number.fetch_add(1, Ordering::Relaxed);
    if let Some(task) = self.cleanup_task_handle.lock().take() {
      task.abort();
    }
  }

  /// Records that a module load has finished. When the in-flight count drops
  /// to zero, schedules a task that frees all parsed sources after
  /// `cleanup_task_timeout` milliseconds, aborting any previously scheduled
  /// cleanup task.
  pub fn decrease(&self, parsed_source_cache: &Arc<ParsedSourceCache>) {
    // fetch_sub returns the previous value, so `prev == 1` means this call
    // brought the count to zero.
    let prev = self.loads_number.fetch_sub(1, Ordering::Relaxed);
    if prev == 1 {
      let parsed_source_cache = parsed_source_cache.clone();
      let timeout = self.cleanup_task_timeout;
      let task_handle = tokio::spawn(async move {
        // Delay before clearing (10s in production; see the factory's
        // `cleanup_task_timeout: 10_000`) so dynamic imports issued shortly
        // after startup don't force re-parsing and re-analyzing modules.
        tokio::time::sleep(std::time::Duration::from_millis(timeout)).await;
        parsed_source_cache.free_all();
      });
      // Replace any previously scheduled task and abort it so only one
      // cleanup is ever pending.
      let maybe_prev_task =
        self.cleanup_task_handle.lock().replace(task_handle);
      if let Some(prev_task) = maybe_prev_task {
        prev_task.abort();
      }
    }
  }
}
|
||||
|
||||
pub struct CliModuleLoaderFactory {
|
||||
|
@ -272,6 +311,11 @@ impl CliModuleLoaderFactory {
|
|||
npm_module_loader,
|
||||
parsed_source_cache,
|
||||
resolver,
|
||||
in_flight_loads_tracker: InFlightModuleLoadsTracker {
|
||||
loads_number: Arc::new(AtomicU16::new(0)),
|
||||
cleanup_task_timeout: 10_000,
|
||||
cleanup_task_handle: Arc::new(Mutex::new(None)),
|
||||
},
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -867,6 +911,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
|
|||
_maybe_referrer: Option<String>,
|
||||
is_dynamic: bool,
|
||||
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
|
||||
self.0.shared.in_flight_loads_tracker.increase();
|
||||
if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) {
|
||||
return Box::pin(deno_core::futures::future::ready(Ok(())));
|
||||
}
|
||||
|
@ -921,6 +966,14 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
|
|||
.boxed_local()
|
||||
}
|
||||
|
||||
fn finish_load(&self) {
|
||||
self
|
||||
.0
|
||||
.shared
|
||||
.in_flight_loads_tracker
|
||||
.decrease(&self.0.shared.parsed_source_cache);
|
||||
}
|
||||
|
||||
fn code_cache_ready(
|
||||
&self,
|
||||
specifier: ModuleSpecifier,
|
||||
|
@ -1103,3 +1156,44 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
|
|||
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;
  use deno_graph::ParsedSourceStore;

  // Verifies that the tracker only schedules a cache-cleanup task when the
  // in-flight count reaches zero, cancels it when a new load starts, and
  // that the task eventually frees the parsed-source cache.
  #[tokio::test]
  async fn test_inflight_module_loads_tracker() {
    let tracker = InFlightModuleLoadsTracker {
      loads_number: Default::default(),
      // Short timeout so the test doesn't wait the production 10s.
      cleanup_task_timeout: 10,
      cleanup_task_handle: Default::default(),
    };

    // Seed the cache with one parsed source.
    let specifier = ModuleSpecifier::parse("file:///a.js").unwrap();
    let source = "const a = 'hello';";
    let parsed_source_cache = Arc::new(ParsedSourceCache::default());
    let parsed_source = parsed_source_cache
      .remove_or_parse_module(&specifier, source.into(), MediaType::JavaScript)
      .unwrap();
    parsed_source_cache.set_parsed_source(specifier, parsed_source);

    assert_eq!(parsed_source_cache.len(), 1);
    assert!(tracker.cleanup_task_handle.lock().is_none());
    // Two loads in flight: no cleanup may be scheduled yet.
    tracker.increase();
    tracker.increase();
    assert!(tracker.cleanup_task_handle.lock().is_none());
    tracker.decrease(&parsed_source_cache);
    assert!(tracker.cleanup_task_handle.lock().is_none());
    // Count hits zero: a cleanup task must now be pending.
    tracker.decrease(&parsed_source_cache);
    assert!(tracker.cleanup_task_handle.lock().is_some());
    assert_eq!(parsed_source_cache.len(), 1);
    // A new load cancels the pending cleanup, keeping the cache intact.
    tracker.increase();
    assert!(tracker.cleanup_task_handle.lock().is_none());
    assert_eq!(parsed_source_cache.len(), 1);
    tracker.decrease(&parsed_source_cache);
    // Rather long timeout, but to make sure CI is not flaking on it.
    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
    // Cleanup task ran and emptied the cache.
    assert_eq!(parsed_source_cache.len(), 0);
  }
}
|
||||
|
|
Loading…
Reference in a new issue