Mirror of https://github.com/denoland/deno.git

refactor: break out ModuleInfoCache from ParsedSourceCache (#20977)

As the title says. This will let us use the two caches independently of each other, which will help in an upcoming deno doc PR where I need to parse the source
files with scope analysis.
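
A minimal sketch of how the two caches end up being used independently, relying only on the accessors added in this change (the wrapper function and the `crate::factory` import path are illustrative, not part of the commit):

// Sketch only: assumes it lives inside the `cli` crate next to the code changed below.
use std::sync::Arc;

use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use deno_core::error::AnyError;

fn caches_sketch(factory: &CliFactory) -> Result<(), AnyError> {
  // Backed by the dep analysis sqlite db; usable without parsing any source.
  let module_info_cache: &Arc<ModuleInfoCache> = factory.module_info_cache()?;
  // Now a plain in-memory parsed source store with no sqlite connection of its own.
  let parsed_source_cache: &Arc<ParsedSourceCache> = factory.parsed_source_cache();

  // Graph building is where the two are combined: the analyzer consults the
  // sqlite cache first and only falls back to parsing, capturing any parsed
  // sources into the in-memory store.
  let store = parsed_source_cache.as_store();
  let analyzer = module_info_cache.as_module_analyzer(None, &*store);
  // `&analyzer` is what gets passed as `module_analyzer` in deno_graph::BuildOptions.

  // The parsed source cache can also be used on its own, e.g. to hand deno doc
  // a parser that reuses already-captured sources.
  let parser = parsed_source_cache.as_capturing_parser();

  let _ = (&analyzer, &parser);
  Ok(())
}

Before this change, ParsedSourceCache owned both the sqlite connection and the in-memory store; the diff below moves the sqlite half into cli/cache/module_info.rs.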
David Sherret authored on 2023-10-25 18:13:22 -04:00, committed by GitHub
parent 79a9f2a77c
commit 842e29057d
11 changed files with 403 additions and 357 deletions

cli/cache/caches.rs (4 changed lines)

@@ -10,8 +10,8 @@ use super::cache_db::CacheDBConfiguration;
use super::check::TYPE_CHECK_CACHE_DB;
use super::deno_dir::DenoDirProvider;
use super::incremental::INCREMENTAL_CACHE_DB;
use super::module_info::MODULE_INFO_CACHE_DB;
use super::node::NODE_ANALYSIS_CACHE_DB;
use super::parsed_source::PARSED_SOURCE_CACHE_DB;
pub struct Caches {
dir_provider: Arc<DenoDirProvider>,
@@ -77,7 +77,7 @@ impl Caches {
pub fn dep_analysis_db(&self) -> CacheDB {
Self::make_db(
&self.dep_analysis_db,
&PARSED_SOURCE_CACHE_DB,
&MODULE_INFO_CACHE_DB,
self
.dir_provider
.get_or_create()

cli/cache/mod.rs (11 changed lines)

@@ -32,6 +32,7 @@ mod deno_dir;
mod disk_cache;
mod emit;
mod incremental;
mod module_info;
mod node;
mod parsed_source;
@@ -43,6 +44,8 @@ pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
pub use emit::EmitCache;
pub use incremental::IncrementalCache;
pub use module_info::ModuleInfoCache;
pub use module_info::ModuleInfoCacheModuleAnalyzer;
pub use node::NodeAnalysisCache;
pub use parsed_source::ParsedSourceCache;
@@ -103,7 +106,7 @@ pub struct FetchCacher {
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer,
cache_info_enabled: bool,
}
@@ -115,7 +118,7 @@ impl FetchCacher {
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
global_http_cache: Arc<GlobalHttpCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer,
) -> Self {
Self {
@@ -124,7 +127,7 @@ impl FetchCacher {
file_header_overrides,
global_http_cache,
npm_resolver,
parsed_source_cache,
module_info_cache,
permissions,
cache_info_enabled: false,
}
@@ -297,7 +300,7 @@ impl Loader for FetchCacher {
source: &str,
module_info: &deno_graph::ModuleInfo,
) {
let result = self.parsed_source_cache.cache_module_info(
let result = self.module_info_cache.set_module_info(
specifier,
MediaType::from_specifier(specifier),
source,

cli/cache/module_info.rs (new file, 291 lines)

@@ -0,0 +1,291 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_graph::CapturingModuleParser;
use deno_graph::DefaultModuleAnalyzer;
use deno_graph::ModuleInfo;
use deno_graph::ModuleParser;
use deno_graph::ParsedSourceStore;
use deno_runtime::deno_webstorage::rusqlite::params;
use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheFailure;
use super::FastInsecureHasher;
const SELECT_MODULE_INFO: &str = "
SELECT
module_info
FROM
moduleinfocache
WHERE
specifier=?1
AND media_type=?2
AND source_hash=?3
LIMIT 1";
pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
table_initializer: "CREATE TABLE IF NOT EXISTS moduleinfocache (
specifier TEXT PRIMARY KEY,
media_type TEXT NOT NULL,
source_hash TEXT NOT NULL,
module_info TEXT NOT NULL
);",
on_version_change: "DELETE FROM moduleinfocache;",
preheat_queries: &[SELECT_MODULE_INFO],
on_failure: CacheFailure::InMemory,
};
/// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable
/// performance improvement because when it exists we can skip parsing a module for
/// deno_graph.
pub struct ModuleInfoCache {
conn: CacheDB,
}
impl ModuleInfoCache {
#[cfg(test)]
pub fn new_in_memory(version: &'static str) -> Self {
Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version))
}
pub fn new(conn: CacheDB) -> Self {
Self { conn }
}
/// Useful for testing: re-create this cache DB with a different current version.
#[cfg(test)]
pub(crate) fn recreate_with_version(self, version: &'static str) -> Self {
Self {
conn: self.conn.recreate_with_version(version),
}
}
pub fn get_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
expected_source_hash: &str,
) -> Result<Option<ModuleInfo>, AnyError> {
let query = SELECT_MODULE_INFO;
let res = self.conn.query_row(
query,
params![
&specifier.as_str(),
serialize_media_type(media_type),
&expected_source_hash,
],
|row| {
let module_info: String = row.get(0)?;
let module_info = serde_json::from_str(&module_info)?;
Ok(module_info)
},
)?;
Ok(res)
}
pub fn set_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: &str,
module_info: &ModuleInfo,
) -> Result<(), AnyError> {
let sql = "
INSERT OR REPLACE INTO
moduleinfocache (specifier, media_type, source_hash, module_info)
VALUES
(?1, ?2, ?3, ?4)";
self.conn.execute(
sql,
params![
specifier.as_str(),
serialize_media_type(media_type),
&source_hash,
&serde_json::to_string(&module_info)?,
],
)?;
Ok(())
}
pub fn as_module_analyzer<'a>(
&'a self,
parser: Option<&'a dyn ModuleParser>,
store: &'a dyn ParsedSourceStore,
) -> ModuleInfoCacheModuleAnalyzer<'a> {
ModuleInfoCacheModuleAnalyzer {
module_info_cache: self,
parser: CapturingModuleParser::new(parser, store),
}
}
}
pub struct ModuleInfoCacheModuleAnalyzer<'a> {
module_info_cache: &'a ModuleInfoCache,
parser: CapturingModuleParser<'a>,
}
impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
fn analyze(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<ModuleInfo, deno_ast::Diagnostic> {
// attempt to load from the cache
let source_hash = FastInsecureHasher::hash(source.as_bytes()).to_string();
match self.module_info_cache.get_module_info(
specifier,
media_type,
&source_hash,
) {
Ok(Some(info)) => return Ok(info),
Ok(None) => {}
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
}
}
// otherwise, get the module info from the parsed source cache
let analyzer = DefaultModuleAnalyzer::new(&self.parser);
let module_info = analyzer.analyze(specifier, source, media_type)?;
// then attempt to cache it
if let Err(err) = self.module_info_cache.set_module_info(
specifier,
media_type,
&source_hash,
&module_info,
) {
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
Ok(module_info)
}
}
// todo(dsherret): change this to be stored as an integer next time
// the cache version is bumped
fn serialize_media_type(media_type: MediaType) -> &'static str {
use MediaType::*;
match media_type {
JavaScript => "1",
Jsx => "2",
Mjs => "3",
Cjs => "4",
TypeScript => "5",
Mts => "6",
Cts => "7",
Dts => "8",
Dmts => "9",
Dcts => "10",
Tsx => "11",
Json => "12",
Wasm => "13",
TsBuildInfo => "14",
SourceMap => "15",
Unknown => "16",
}
}
#[cfg(test)]
mod test {
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use super::*;
#[test]
pub fn module_info_cache_general_use() {
let cache = ModuleInfoCache::new_in_memory("1.0.0");
let specifier1 =
ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
let specifier2 =
ModuleSpecifier::parse("https://localhost/mod2.ts").unwrap();
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
None
);
let mut module_info = ModuleInfo::default();
module_info.jsdoc_imports.push(SpecifierWithRange {
range: PositionRange {
start: deno_graph::Position {
line: 0,
character: 3,
},
end: deno_graph::Position {
line: 1,
character: 2,
},
},
text: "test".to_string(),
});
cache
.set_module_info(&specifier1, MediaType::JavaScript, "1", &module_info)
.unwrap();
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
Some(module_info.clone())
);
assert_eq!(
cache
.get_module_info(&specifier2, MediaType::JavaScript, "1")
.unwrap(),
None,
);
// different media type
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::TypeScript, "1")
.unwrap(),
None,
);
// different source hash
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "2")
.unwrap(),
None,
);
// try recreating with the same version
let cache = cache.recreate_with_version("1.0.0");
// should get it
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
Some(module_info)
);
// try recreating with a different version
let cache = cache.recreate_with_version("1.0.1");
// should no longer exist
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
None,
);
}
}

cli/cache/parsed_source.rs

@@ -6,94 +6,16 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_graph::CapturingModuleParser;
use deno_graph::DefaultModuleAnalyzer;
use deno_graph::ModuleInfo;
use deno_graph::ModuleParser;
use deno_runtime::deno_webstorage::rusqlite::params;
use super::cache_db::CacheDB;
use super::cache_db::CacheDBConfiguration;
use super::cache_db::CacheFailure;
use super::FastInsecureHasher;
const SELECT_MODULE_INFO: &str = "
SELECT
module_info
FROM
moduleinfocache
WHERE
specifier=?1
AND media_type=?2
AND source_hash=?3
LIMIT 1";
pub static PARSED_SOURCE_CACHE_DB: CacheDBConfiguration =
CacheDBConfiguration {
table_initializer: "CREATE TABLE IF NOT EXISTS moduleinfocache (
specifier TEXT PRIMARY KEY,
media_type TEXT NOT NULL,
source_hash TEXT NOT NULL,
module_info TEXT NOT NULL
);",
on_version_change: "DELETE FROM moduleinfocache;",
preheat_queries: &[SELECT_MODULE_INFO],
on_failure: CacheFailure::InMemory,
};
#[derive(Clone, Default)]
struct ParsedSourceCacheSources(
Arc<Mutex<HashMap<ModuleSpecifier, ParsedSource>>>,
);
/// It's ok that this is racy since in non-LSP situations
/// this will only ever store one form of a parsed source
/// and in LSP settings the concurrency will be enforced
/// at a higher level to ensure this will have the latest
/// parsed source.
impl deno_graph::ParsedSourceStore for ParsedSourceCacheSources {
fn set_parsed_source(
&self,
specifier: deno_graph::ModuleSpecifier,
parsed_source: ParsedSource,
) -> Option<ParsedSource> {
self.0.lock().insert(specifier, parsed_source)
}
fn get_parsed_source(
&self,
specifier: &deno_graph::ModuleSpecifier,
) -> Option<ParsedSource> {
self.0.lock().get(specifier).cloned()
}
}
/// A cache of `ParsedSource`s, which may be used with `deno_graph`
/// for cached dependency analysis.
#[derive(Default)]
pub struct ParsedSourceCache {
db: CacheDB,
sources: ParsedSourceCacheSources,
sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>,
}
impl ParsedSourceCache {
#[cfg(test)]
pub fn new_in_memory() -> Self {
Self {
db: CacheDB::in_memory(&PARSED_SOURCE_CACHE_DB, crate::version::deno()),
sources: Default::default(),
}
}
pub fn new(db: CacheDB) -> Self {
Self {
db,
sources: Default::default(),
}
}
pub fn get_parsed_source_from_esm_module(
&self,
module: &deno_graph::EsmModule,
@@ -120,251 +42,38 @@ impl ParsedSourceCache {
/// Frees the parsed source from memory.
pub fn free(&self, specifier: &ModuleSpecifier) {
self.sources.0.lock().remove(specifier);
}
pub fn as_analyzer(&self) -> Box<dyn deno_graph::ModuleAnalyzer> {
Box::new(ParsedSourceCacheModuleAnalyzer::new(
self.db.clone(),
self.sources.clone(),
))
self.sources.lock().remove(specifier);
}
/// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingModuleParser {
CapturingModuleParser::new(None, &self.sources)
CapturingModuleParser::new(None, self)
}
pub fn cache_module_info(
pub fn as_store(self: &Arc<Self>) -> Arc<dyn deno_graph::ParsedSourceStore> {
self.clone()
}
}
/// It's ok that this is racy since in non-LSP situations
/// this will only ever store one form of a parsed source
/// and in LSP settings the concurrency will be enforced
/// at a higher level to ensure this will have the latest
/// parsed source.
impl deno_graph::ParsedSourceStore for ParsedSourceCache {
fn set_parsed_source(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &str,
module_info: &ModuleInfo,
) -> Result<(), AnyError> {
let source_hash = compute_source_hash(source.as_bytes());
ParsedSourceCacheModuleAnalyzer::new(self.db.clone(), self.sources.clone())
.set_module_info(specifier, media_type, &source_hash, module_info)
}
}
struct ParsedSourceCacheModuleAnalyzer {
conn: CacheDB,
sources: ParsedSourceCacheSources,
}
impl ParsedSourceCacheModuleAnalyzer {
pub fn new(conn: CacheDB, sources: ParsedSourceCacheSources) -> Self {
Self { conn, sources }
specifier: deno_graph::ModuleSpecifier,
parsed_source: ParsedSource,
) -> Option<ParsedSource> {
self.sources.lock().insert(specifier, parsed_source)
}
pub fn get_module_info(
fn get_parsed_source(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
expected_source_hash: &str,
) -> Result<Option<ModuleInfo>, AnyError> {
let query = SELECT_MODULE_INFO;
let res = self.conn.query_row(
query,
params![
&specifier.as_str(),
serialize_media_type(media_type),
&expected_source_hash,
],
|row| {
let module_info: String = row.get(0)?;
let module_info = serde_json::from_str(&module_info)?;
Ok(module_info)
},
)?;
Ok(res)
}
pub fn set_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: &str,
module_info: &ModuleInfo,
) -> Result<(), AnyError> {
let sql = "
INSERT OR REPLACE INTO
moduleinfocache (specifier, media_type, source_hash, module_info)
VALUES
(?1, ?2, ?3, ?4)";
self.conn.execute(
sql,
params![
specifier.as_str(),
serialize_media_type(media_type),
&source_hash,
&serde_json::to_string(&module_info)?,
],
)?;
Ok(())
}
}
// todo(dsherret): change this to be stored as an integer next time
// the cache version is bumped
fn serialize_media_type(media_type: MediaType) -> &'static str {
use MediaType::*;
match media_type {
JavaScript => "1",
Jsx => "2",
Mjs => "3",
Cjs => "4",
TypeScript => "5",
Mts => "6",
Cts => "7",
Dts => "8",
Dmts => "9",
Dcts => "10",
Tsx => "11",
Json => "12",
Wasm => "13",
TsBuildInfo => "14",
SourceMap => "15",
Unknown => "16",
}
}
impl deno_graph::ModuleAnalyzer for ParsedSourceCacheModuleAnalyzer {
fn analyze(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<ModuleInfo, deno_ast::Diagnostic> {
// attempt to load from the cache
let source_hash = compute_source_hash(source.as_bytes());
match self.get_module_info(specifier, media_type, &source_hash) {
Ok(Some(info)) => return Ok(info),
Ok(None) => {}
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
}
}
// otherwise, get the module info from the parsed source cache
let parser = CapturingModuleParser::new(None, &self.sources);
let analyzer = DefaultModuleAnalyzer::new(&parser);
let module_info = analyzer.analyze(specifier, source, media_type)?;
// then attempt to cache it
if let Err(err) =
self.set_module_info(specifier, media_type, &source_hash, &module_info)
{
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
Ok(module_info)
}
}
fn compute_source_hash(bytes: &[u8]) -> String {
FastInsecureHasher::hash(bytes).to_string()
}
#[cfg(test)]
mod test {
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use super::*;
#[test]
pub fn parsed_source_cache_module_analyzer_general_use() {
let conn = CacheDB::in_memory(&PARSED_SOURCE_CACHE_DB, "1.0.0");
let cache = ParsedSourceCacheModuleAnalyzer::new(conn, Default::default());
let specifier1 =
ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
let specifier2 =
ModuleSpecifier::parse("https://localhost/mod2.ts").unwrap();
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
None
);
let mut module_info = ModuleInfo::default();
module_info.jsdoc_imports.push(SpecifierWithRange {
range: PositionRange {
start: deno_graph::Position {
line: 0,
character: 3,
},
end: deno_graph::Position {
line: 1,
character: 2,
},
},
text: "test".to_string(),
});
cache
.set_module_info(&specifier1, MediaType::JavaScript, "1", &module_info)
.unwrap();
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
Some(module_info.clone())
);
assert_eq!(
cache
.get_module_info(&specifier2, MediaType::JavaScript, "1")
.unwrap(),
None,
);
// different media type
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::TypeScript, "1")
.unwrap(),
None,
);
// different source hash
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "2")
.unwrap(),
None,
);
// try recreating with the same version
let conn = cache.conn.recreate_with_version("1.0.0");
let cache = ParsedSourceCacheModuleAnalyzer::new(conn, Default::default());
// should get it
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
Some(module_info)
);
// try recreating with a different version
let conn = cache.conn.recreate_with_version("1.0.1");
let cache = ParsedSourceCacheModuleAnalyzer::new(conn, Default::default());
// should no longer exist
assert_eq!(
cache
.get_module_info(&specifier1, MediaType::JavaScript, "1")
.unwrap(),
None,
);
specifier: &deno_graph::ModuleSpecifier,
) -> Option<ParsedSource> {
self.sources.lock().get(specifier).cloned()
}
}

cli/factory.rs

@@ -15,6 +15,7 @@ use crate::cache::EmitCache;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalHttpCache;
use crate::cache::ModuleInfoCache;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
@@ -152,6 +153,7 @@ struct CliFactoryServices {
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
@@ -413,16 +415,21 @@ impl CliFactory {
})
}
pub fn parsed_source_cache(
&self,
) -> Result<&Arc<ParsedSourceCache>, AnyError> {
self.services.parsed_source_cache.get_or_try_init(|| {
Ok(Arc::new(ParsedSourceCache::new(
pub fn module_info_cache(&self) -> Result<&Arc<ModuleInfoCache>, AnyError> {
self.services.module_info_cache.get_or_try_init(|| {
Ok(Arc::new(ModuleInfoCache::new(
self.caches()?.dep_analysis_db(),
)))
})
}
pub fn parsed_source_cache(&self) -> &Arc<ParsedSourceCache> {
self
.services
.parsed_source_cache
.get_or_init(Default::default)
}
pub fn emitter(&self) -> Result<&Arc<Emitter>, AnyError> {
self.services.emitter.get_or_try_init(|| {
let ts_config_result = self
@@ -435,7 +442,7 @@ impl CliFactory {
crate::args::ts_config_to_emit_options(ts_config_result.ts_config);
Ok(Arc::new(Emitter::new(
self.emit_cache()?.clone(),
self.parsed_source_cache()?.clone(),
self.parsed_source_cache().clone(),
emit_options,
)))
})
@@ -503,7 +510,8 @@ impl CliFactory {
self.options.clone(),
self.resolver().await?.clone(),
self.npm_resolver().await?.clone(),
self.parsed_source_cache()?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
self.maybe_lockfile().clone(),
self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(),
@@ -547,7 +555,8 @@ impl CliFactory {
self.maybe_lockfile().clone(),
self.maybe_file_watcher_reporter().clone(),
self.module_graph_builder().await?.clone(),
self.parsed_source_cache()?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
self.text_only_progress_bar().clone(),
self.resolver().await?.clone(),
self.type_checker().await?.clone(),
@@ -622,7 +631,7 @@ impl CliFactory {
self.emitter()?.clone(),
self.graph_container().clone(),
self.module_load_preparer().await?.clone(),
self.parsed_source_cache()?.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
cli_node_resolver.clone(),
NpmModuleLoader::new(

cli/graph_util.rs

@@ -5,6 +5,7 @@ use crate::args::Lockfile;
use crate::args::TsTypeLib;
use crate::cache;
use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;
@@ -27,6 +28,7 @@ use deno_graph::source::Loader;
use deno_graph::source::ResolveError;
use deno_graph::GraphKind;
use deno_graph::Module;
use deno_graph::ModuleAnalyzer;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
@@ -182,10 +184,18 @@ pub fn graph_lock_or_exit(graph: &ModuleGraph, lockfile: &mut Lockfile) {
}
}
pub struct CreateGraphOptions<'a> {
pub graph_kind: GraphKind,
pub roots: Vec<ModuleSpecifier>,
pub loader: &'a mut dyn Loader,
pub analyzer: &'a dyn ModuleAnalyzer,
}
pub struct ModuleGraphBuilder {
options: Arc<CliOptions>,
resolver: Arc<CliGraphResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
@@ -201,6 +211,7 @@ impl ModuleGraphBuilder {
options: Arc<CliOptions>,
resolver: Arc<CliGraphResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
@@ -213,6 +224,7 @@ impl ModuleGraphBuilder {
options,
resolver,
npm_resolver,
module_info_cache,
parsed_source_cache,
lockfile,
maybe_file_watcher_reporter,
@@ -223,35 +235,61 @@ impl ModuleGraphBuilder {
}
}
pub async fn create_graph(
&self,
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = self.create_graph_loader();
self
.create_graph_with_loader(graph_kind, roots, &mut cache)
.await
}
pub async fn create_graph_with_loader(
&self,
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
loader: &mut dyn Loader,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let store = self.parsed_source_cache.as_store();
let analyzer = self.module_info_cache.as_module_analyzer(None, &*store);
self
.create_graph_with_options(CreateGraphOptions {
graph_kind,
roots,
loader,
analyzer: &analyzer,
})
.await
}
pub async fn create_graph_with_options(
&self,
options: CreateGraphOptions<'_>,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let maybe_imports = self.options.to_maybe_imports()?;
let cli_resolver = self.resolver.clone();
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
let analyzer = self.parsed_source_cache.as_analyzer();
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
.map(|r| r.as_reporter());
let mut graph = ModuleGraph::new(graph_kind);
let mut graph = ModuleGraph::new(options.graph_kind);
self
.build_graph_with_npm_resolution(
&mut graph,
roots,
loader,
options.roots,
options.loader,
deno_graph::BuildOptions {
is_dynamic: false,
imports: maybe_imports,
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
module_analyzer: Some(options.analyzer),
reporter: maybe_file_watcher_reporter,
// todo(dsherret): workspace support
workspace_members: vec![],
@@ -277,7 +315,8 @@ impl ModuleGraphBuilder {
let cli_resolver = self.resolver.clone();
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
let analyzer = self.parsed_source_cache.as_analyzer();
let store = self.parsed_source_cache.as_store();
let analyzer = self.module_info_cache.as_module_analyzer(None, &*store);
let graph_kind = self.options.type_check_mode().as_graph_kind();
let mut graph = ModuleGraph::new(graph_kind);
let maybe_file_watcher_reporter = self
@@ -295,7 +334,7 @@ impl ModuleGraphBuilder {
imports: maybe_imports,
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
module_analyzer: Some(&analyzer),
reporter: maybe_file_watcher_reporter,
// todo(dsherret): workspace support
workspace_members: vec![],
@@ -436,21 +475,10 @@ impl ModuleGraphBuilder {
self.options.resolve_file_header_overrides(),
self.global_http_cache.clone(),
self.npm_resolver.clone(),
self.parsed_source_cache.clone(),
self.module_info_cache.clone(),
permissions,
)
}
pub async fn create_graph(
&self,
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = self.create_graph_loader();
self
.create_graph_with_loader(graph_kind, roots, &mut cache)
.await
}
}
pub fn error_for_any_npm_specifier(

cli/module_loader.rs

@@ -3,6 +3,7 @@
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::TsTypeLib;
use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::graph_util::graph_lock_or_exit;
@@ -66,6 +67,7 @@ pub struct ModuleLoadPreparer {
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_graph_builder: Arc<ModuleGraphBuilder>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
progress_bar: ProgressBar,
resolver: Arc<CliGraphResolver>,
@@ -80,6 +82,7 @@ impl ModuleLoadPreparer {
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_graph_builder: Arc<ModuleGraphBuilder>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
progress_bar: ProgressBar,
resolver: Arc<CliGraphResolver>,
@@ -91,6 +94,7 @@ impl ModuleLoadPreparer {
lockfile,
maybe_file_watcher_reporter,
module_graph_builder,
module_info_cache,
parsed_source_cache,
progress_bar,
resolver,
@@ -122,7 +126,8 @@ impl ModuleLoadPreparer {
.as_ref()
.map(|r| r.as_reporter());
let analyzer = self.parsed_source_cache.as_analyzer();
let store = self.parsed_source_cache.as_store();
let analyzer = self.module_info_cache.as_module_analyzer(None, &*store);
log::debug!("Creating module graph.");
let mut graph_update_permit =
@@ -145,7 +150,7 @@ impl ModuleLoadPreparer {
imports: maybe_imports,
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
module_analyzer: Some(&analyzer),
reporter: maybe_file_watcher_reporter,
// todo(dsherret): workspace support
workspace_members: vec![],

cli/tools/compile.rs

@@ -25,7 +25,7 @@ pub async fn compile(
let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options();
let module_graph_builder = factory.module_graph_builder().await?;
let parsed_source_cache = factory.parsed_source_cache()?;
let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?;
let module_specifier = cli_options.resolve_main_module()?;
let module_roots = {

cli/tools/doc.rs

@@ -65,7 +65,7 @@ pub async fn print_docs(
let file_fetcher = factory.file_fetcher()?;
let module_graph_builder = factory.module_graph_builder().await?;
let maybe_lockfile = factory.maybe_lockfile();
let parsed_source_cache = factory.parsed_source_cache()?;
let parsed_source_cache = factory.parsed_source_cache();
let module_specifier =
resolve_url_or_path(&source_file, cli_options.initial_cwd())?;

cli/tools/vendor/mod.rs

@@ -62,7 +62,7 @@ pub async fn vendor(
}
.boxed_local()
},
parsed_source_cache: factory.parsed_source_cache()?,
parsed_source_cache: factory.parsed_source_cache(),
output_dir: &output_dir,
maybe_original_import_map: factory.maybe_import_map().await?.as_deref(),
maybe_lockfile: factory.maybe_lockfile().clone(),

cli/tools/vendor/test.rs

@@ -17,6 +17,7 @@ use deno_core::serde_json;
use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_graph::DefaultModuleAnalyzer;
use deno_graph::GraphKind;
use deno_graph::ModuleGraph;
use deno_runtime::deno_fs::RealFs;
@@ -234,8 +235,7 @@ impl VendorTestBuilder {
let output_dir = make_path("/vendor");
let entry_points = self.entry_points.clone();
let loader = self.loader.clone();
let parsed_source_cache = ParsedSourceCache::new_in_memory();
let analyzer = parsed_source_cache.as_analyzer();
let parsed_source_cache = ParsedSourceCache::default();
let resolver = Arc::new(build_resolver(
self.jsx_import_source_config.clone(),
self.original_import_map.clone(),
@@ -246,12 +246,13 @@ impl VendorTestBuilder {
let resolver = resolver.clone();
move |entry_points| {
async move {
let analyzer = DefaultModuleAnalyzer::default();
Ok(
build_test_graph(
entry_points,
loader,
resolver.as_graph_resolver(),
&*analyzer,
&analyzer,
)
.await,
)