Mirror of https://github.com/denoland/deno.git (synced 2024-11-21 15:04:11 -05:00)
refactor: break up ProcState (#18707)
1. Breaks up functionality within `ProcState` into several other structs so that responsibilities are separated (`ProcState` is only a data struct now).
2. Moves towards being able to inject dependencies more easily and have functionality only require what it needs.
3. Exposes `Arc<T>` around the "service structs" instead of embedding it within them. The idea behind embedding them was to reduce the verbosity of needing to pass around `Arc<...>`, but I don't think it was working well, and as we move more of these structs to be injectable I don't think the extra verbosity will be a big deal.
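The ownership change in point 3 can be illustrated with a minimal sketch (the type names below are simplified stand-ins, not the actual definitions from this diff):

```rust
#![allow(dead_code)]
use std::sync::Arc;

// Hypothetical stand-in for one of the cache handles in cli/cache.
#[derive(Default)]
struct CacheDb;

// Before: the Arc lived inside the service struct, so cloning the service
// was cheap but the sharing was hidden from callers.
#[derive(Clone, Default)]
struct CachesWithEmbeddedArc {
  db: Arc<CacheDb>,
}

// After: the service struct owns its state directly...
#[derive(Default)]
struct Caches {
  db: CacheDb,
}

// ...and callers share it explicitly as Arc<Caches>; ProcState becomes a
// plain data struct holding the injected services.
#[derive(Default)]
struct ProcState {
  caches: Arc<Caches>,
}

fn main() {
  let caches = Arc::new(Caches::default());
  // Each component that needs the caches gets its own Arc handle.
  let proc_state = ProcState { caches: caches.clone() };
  let for_module_graph_builder = caches.clone();
  drop((proc_state, for_module_graph_builder));
}
```

Most of the diff below follows this pattern: service constructors take `Arc`-wrapped dependencies, and callers clone the `Arc` handles rather than the structs themselves.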
parent: a411144219
commit: 136dce67ce
33 changed files with 1506 additions and 1285 deletions
cli/cache/caches.rs (vendored, 15 changes)
@@ -1,7 +1,6 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use std::path::PathBuf;
use std::sync::Arc;

use once_cell::sync::OnceCell;

@@ -13,18 +12,18 @@ use super::node::NODE_ANALYSIS_CACHE_DB;
use super::parsed_source::PARSED_SOURCE_CACHE_DB;
use super::DenoDir;

#[derive(Clone, Default)]
#[derive(Default)]
pub struct Caches {
  fmt_incremental_cache_db: Arc<OnceCell<CacheDB>>,
  lint_incremental_cache_db: Arc<OnceCell<CacheDB>>,
  dep_analysis_db: Arc<OnceCell<CacheDB>>,
  node_analysis_db: Arc<OnceCell<CacheDB>>,
  type_checking_cache_db: Arc<OnceCell<CacheDB>>,
  fmt_incremental_cache_db: OnceCell<CacheDB>,
  lint_incremental_cache_db: OnceCell<CacheDB>,
  dep_analysis_db: OnceCell<CacheDB>,
  node_analysis_db: OnceCell<CacheDB>,
  type_checking_cache_db: OnceCell<CacheDB>,
}

impl Caches {
  fn make_db(
    cell: &Arc<OnceCell<CacheDB>>,
    cell: &OnceCell<CacheDB>,
    config: &'static CacheDBConfiguration,
    path: PathBuf,
  ) -> CacheDB {
cli/cache/parsed_source.rs (vendored, 8 changes)
@@ -73,7 +73,6 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCacheSources {

/// A cache of `ParsedSource`s, which may be used with `deno_graph`
/// for cached dependency analysis.
#[derive(Clone)]
pub struct ParsedSourceCache {
  db: CacheDB,
  sources: ParsedSourceCacheSources,

@@ -95,11 +94,8 @@ impl ParsedSourceCache {
    }
  }

  pub fn reset_for_file_watcher(&self) -> Self {
    Self {
      db: self.db.clone(),
      sources: Default::default(),
    }
  pub fn clear(&self) {
    self.sources.0.lock().clear();
  }

  pub fn get_parsed_source_from_esm_module(
cli/emit.rs (17 changes)
@@ -12,10 +12,9 @@ use deno_graph::Module;
use deno_graph::ModuleGraph;
use std::sync::Arc;

#[derive(Clone)]
pub struct Emitter {
  emit_cache: EmitCache,
  parsed_source_cache: ParsedSourceCache,
  parsed_source_cache: Arc<ParsedSourceCache>,
  emit_options: deno_ast::EmitOptions,
  // cached hash of the emit options
  emit_options_hash: u64,

@@ -24,7 +23,7 @@ pub struct Emitter {
impl Emitter {
  pub fn new(
    emit_cache: EmitCache,
    parsed_source_cache: ParsedSourceCache,
    parsed_source_cache: Arc<ParsedSourceCache>,
    emit_options: deno_ast::EmitOptions,
  ) -> Self {
    let emit_options_hash = FastInsecureHasher::new()

@@ -64,6 +63,16 @@ impl Emitter {
    Ok(())
  }

  /// Gets a cached emit if the source matches the hash found in the cache.
  pub fn maybed_cached_emit(
    &self,
    specifier: &ModuleSpecifier,
    source: &str,
  ) -> Option<String> {
    let source_hash = self.get_source_hash(source);
    self.emit_cache.get_emit_code(specifier, source_hash)
  }

  pub fn emit_parsed_source(
    &self,
    specifier: &ModuleSpecifier,

@@ -97,7 +106,7 @@ impl Emitter {
  /// A hashing function that takes the source code and uses the global emit
  /// options then generates a string hash which can be stored to
  /// determine if the cached emit is valid or not.
  pub fn get_source_hash(&self, source_text: &str) -> u64 {
  fn get_source_hash(&self, source_text: &str) -> u64 {
    FastInsecureHasher::new()
      .write_str(source_text)
      .write_u64(self.emit_options_hash)

cli/graph_util.rs
@@ -6,21 +6,25 @@ use crate::args::TsConfigType;
use crate::args::TsTypeLib;
use crate::args::TypeCheckMode;
use crate::cache;
use crate::cache::DenoDir;
use crate::cache::ParsedSourceCache;
use crate::cache::TypeCheckCache;
use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
use crate::npm::NpmPackageResolver;
use crate::proc_state::ProcState;
use crate::resolver::CliGraphResolver;
use crate::tools::check;

use deno_core::anyhow::bail;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::RwLock;
use deno_core::ModuleSpecifier;
use deno_core::TaskQueue;
use deno_core::TaskQueuePermit;
use deno_graph::source::Loader;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
@@ -160,115 +164,215 @@ pub fn graph_lock_or_exit(graph: &ModuleGraph, lockfile: &mut Lockfile) {
  }
}

pub async fn create_graph_and_maybe_check(
  roots: Vec<ModuleSpecifier>,
  ps: &ProcState,
) -> Result<Arc<deno_graph::ModuleGraph>, AnyError> {
  let mut cache = cache::FetchCacher::new(
    ps.emit_cache.clone(),
    ps.file_fetcher.clone(),
    ps.options.resolve_file_header_overrides(),
    PermissionsContainer::allow_all(),
    PermissionsContainer::allow_all(),
    ps.options.node_modules_dir_specifier(),
  );
  let maybe_imports = ps.options.to_maybe_imports()?;
  let cli_resolver = CliGraphResolver::new(
    ps.options.to_maybe_jsx_import_source_config(),
    ps.maybe_import_map.clone(),
    ps.options.no_npm(),
    ps.npm_api.clone(),
    ps.npm_resolution.clone(),
    ps.package_json_deps_installer.clone(),
  );
  let graph_resolver = cli_resolver.as_graph_resolver();
  let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
  let analyzer = ps.parsed_source_cache.as_analyzer();
  let mut graph = ModuleGraph::default();
  build_graph_with_npm_resolution(
    &mut graph,
    &cli_resolver,
    &ps.npm_resolver,
    roots,
    &mut cache,
    deno_graph::BuildOptions {
      is_dynamic: false,
      imports: maybe_imports,
      resolver: Some(graph_resolver),
      npm_resolver: Some(graph_npm_resolver),
      module_analyzer: Some(&*analyzer),
      reporter: None,
    },
  )
  .await?;
pub struct ModuleGraphBuilder {
  options: Arc<CliOptions>,
  resolver: Arc<CliGraphResolver>,
  npm_resolver: Arc<NpmPackageResolver>,
  parsed_source_cache: Arc<ParsedSourceCache>,
  lockfile: Option<Arc<Mutex<Lockfile>>>,
  caches: Arc<cache::Caches>,
  emit_cache: cache::EmitCache,
  file_fetcher: Arc<FileFetcher>,
  deno_dir: DenoDir,
}

  graph_valid_with_cli_options(&graph, &graph.roots, &ps.options)?;
  let graph = Arc::new(graph);
  if let Some(lockfile) = &ps.lockfile {
    graph_lock_or_exit(&graph, &mut lockfile.lock());
impl ModuleGraphBuilder {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    options: Arc<CliOptions>,
    resolver: Arc<CliGraphResolver>,
    npm_resolver: Arc<NpmPackageResolver>,
    parsed_source_cache: Arc<ParsedSourceCache>,
    lockfile: Option<Arc<Mutex<Lockfile>>>,
    caches: Arc<cache::Caches>,
    emit_cache: cache::EmitCache,
    file_fetcher: Arc<FileFetcher>,
    deno_dir: DenoDir,
  ) -> Self {
    Self {
      options,
      resolver,
      npm_resolver,
      parsed_source_cache,
      lockfile,
      caches,
      emit_cache,
      file_fetcher,
      deno_dir,
    }
  }

  if ps.options.type_check_mode() != TypeCheckMode::None {
    // node built-in specifiers use the @types/node package to determine
    // types, so inject that now after the lockfile has been written
    if graph.has_node_specifier {
      ps.npm_resolver
  pub async fn create_graph_with_loader(
    &self,
    roots: Vec<ModuleSpecifier>,
    loader: &mut dyn Loader,
  ) -> Result<deno_graph::ModuleGraph, AnyError> {
    let maybe_imports = self.options.to_maybe_imports()?;

    let cli_resolver = self.resolver.clone();
    let graph_resolver = cli_resolver.as_graph_resolver();
    let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
    let analyzer = self.parsed_source_cache.as_analyzer();

    let mut graph = ModuleGraph::default();
    self
      .build_graph_with_npm_resolution(
        &mut graph,
        roots,
        loader,
        deno_graph::BuildOptions {
          is_dynamic: false,
          imports: maybe_imports,
          resolver: Some(graph_resolver),
          npm_resolver: Some(graph_npm_resolver),
          module_analyzer: Some(&*analyzer),
          reporter: None,
        },
      )
      .await?;

    if graph.has_node_specifier
      && self.options.type_check_mode() != TypeCheckMode::None
    {
      self
        .npm_resolver
        .inject_synthetic_types_node_package()
        .await?;
    }

    let ts_config_result =
      ps.options.resolve_ts_config_for_emit(TsConfigType::Check {
        lib: ps.options.ts_type_lib_window(),
      })?;
    if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
      log::warn!("{}", ignored_options);
    }
    let maybe_config_specifier = ps.options.maybe_config_file_specifier();
    let cache = TypeCheckCache::new(ps.caches.type_checking_cache_db(&ps.dir));
    let check_result = check::check(
      graph.clone(),
      &cache,
      &ps.npm_resolver,
      check::CheckOptions {
        type_check_mode: ps.options.type_check_mode(),
        debug: ps.options.log_level() == Some(log::Level::Debug),
        maybe_config_specifier,
        ts_config: ts_config_result.ts_config,
        log_checks: true,
        reload: ps.options.reload_flag(),
      },
    )?;
    log::debug!("{}", check_result.stats);
    if !check_result.diagnostics.is_empty() {
      return Err(check_result.diagnostics.into());
    }
    Ok(graph)
  }

  Ok(graph)
}
  pub async fn create_graph_and_maybe_check(
    &self,
    roots: Vec<ModuleSpecifier>,
  ) -> Result<Arc<deno_graph::ModuleGraph>, AnyError> {
    let mut cache = self.create_graph_loader();
    let maybe_imports = self.options.to_maybe_imports()?;
    let cli_resolver = self.resolver.clone();
    let graph_resolver = cli_resolver.as_graph_resolver();
    let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
    let analyzer = self.parsed_source_cache.as_analyzer();
    let mut graph = ModuleGraph::default();
    self
      .build_graph_with_npm_resolution(
        &mut graph,
        roots,
        &mut cache,
        deno_graph::BuildOptions {
          is_dynamic: false,
          imports: maybe_imports,
          resolver: Some(graph_resolver),
          npm_resolver: Some(graph_npm_resolver),
          module_analyzer: Some(&*analyzer),
          reporter: None,
        },
      )
      .await?;

pub async fn build_graph_with_npm_resolution<'a>(
  graph: &mut ModuleGraph,
  cli_graph_resolver: &CliGraphResolver,
  npm_resolver: &NpmPackageResolver,
  roots: Vec<ModuleSpecifier>,
  loader: &mut dyn deno_graph::source::Loader,
  options: deno_graph::BuildOptions<'a>,
) -> Result<(), AnyError> {
  graph.build(roots, loader, options).await;
    graph_valid_with_cli_options(&graph, &graph.roots, &self.options)?;
    let graph = Arc::new(graph);
    if let Some(lockfile) = &self.lockfile {
      graph_lock_or_exit(&graph, &mut lockfile.lock());
    }

  // ensure that the top level package.json is installed if a
  // specifier was matched in the package.json
  cli_graph_resolver
    .top_level_package_json_install_if_necessary()
    .await?;
    if self.options.type_check_mode() != TypeCheckMode::None {
      // node built-in specifiers use the @types/node package to determine
      // types, so inject that now after the lockfile has been written
      if graph.has_node_specifier {
        self
          .npm_resolver
          .inject_synthetic_types_node_package()
          .await?;
      }

  // resolve the dependencies of any pending dependencies
  // that were inserted by building the graph
  npm_resolver.resolve_pending().await?;
      let ts_config_result =
        self
          .options
          .resolve_ts_config_for_emit(TsConfigType::Check {
            lib: self.options.ts_type_lib_window(),
          })?;
      if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
        log::warn!("{}", ignored_options);
      }
      let maybe_config_specifier = self.options.maybe_config_file_specifier();
      let cache =
        TypeCheckCache::new(self.caches.type_checking_cache_db(&self.deno_dir));
      let check_result = check::check(
        graph.clone(),
        &cache,
        self.npm_resolver.clone(),
        check::CheckOptions {
          type_check_mode: self.options.type_check_mode(),
          debug: self.options.log_level() == Some(log::Level::Debug),
          maybe_config_specifier,
          ts_config: ts_config_result.ts_config,
          log_checks: true,
          reload: self.options.reload_flag(),
        },
      )?;
      log::debug!("{}", check_result.stats);
      if !check_result.diagnostics.is_empty() {
        return Err(check_result.diagnostics.into());
      }
    }

  Ok(())
    Ok(graph)
  }

  pub async fn build_graph_with_npm_resolution<'a>(
    &self,
    graph: &mut ModuleGraph,
    roots: Vec<ModuleSpecifier>,
    loader: &mut dyn deno_graph::source::Loader,
    options: deno_graph::BuildOptions<'a>,
  ) -> Result<(), AnyError> {
    graph.build(roots, loader, options).await;

    // ensure that the top level package.json is installed if a
    // specifier was matched in the package.json
    self
      .resolver
      .top_level_package_json_install_if_necessary()
      .await?;

    // resolve the dependencies of any pending dependencies
    // that were inserted by building the graph
    self.npm_resolver.resolve_pending().await?;

    Ok(())
  }

  /// Creates the default loader used for creating a graph.
  pub fn create_graph_loader(&self) -> cache::FetchCacher {
    self.create_fetch_cacher(
      PermissionsContainer::allow_all(),
      PermissionsContainer::allow_all(),
    )
  }

  pub fn create_fetch_cacher(
    &self,
    root_permissions: PermissionsContainer,
    dynamic_permissions: PermissionsContainer,
  ) -> cache::FetchCacher {
    cache::FetchCacher::new(
      self.emit_cache.clone(),
      self.file_fetcher.clone(),
      self.options.resolve_file_header_overrides(),
      root_permissions,
      dynamic_permissions,
      self.options.node_modules_dir_specifier(),
    )
  }

  pub async fn create_graph(
    &self,
    roots: Vec<ModuleSpecifier>,
  ) -> Result<deno_graph::ModuleGraph, AnyError> {
    let mut cache = self.create_graph_loader();
    self.create_graph_with_loader(roots, &mut cache).await
  }
}

pub fn error_for_any_npm_specifier(
@@ -338,7 +442,7 @@ struct GraphData {
}

/// Holds the `ModuleGraph` and what parts of it are type checked.
#[derive(Clone, Default)]
#[derive(Default)]
pub struct ModuleGraphContainer {
  // Allow only one request to update the graph data at a time,
  // but allow other requests to read from it at any time even
@@ -348,6 +452,10 @@ pub struct ModuleGraphContainer {
}

impl ModuleGraphContainer {
  pub fn clear(&self) {
    self.graph_data.write().graph = Default::default();
  }

  /// Acquires a permit to modify the module graph without other code
  /// having the chance to modify it. In the meantime, other code may
  /// still read from the existing module graph.

cli/lsp/documents.rs
@@ -828,7 +828,7 @@ pub struct Documents {
  imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
  /// A resolver that takes into account currently loaded import map and JSX
  /// settings.
  resolver: CliGraphResolver,
  resolver: Arc<CliGraphResolver>,
  /// The npm package requirements found in npm specifiers.
  npm_specifier_reqs: Arc<Vec<NpmPackageReq>>,
  /// Gets if any document had a node: specifier such that a @types/node package

@@ -849,7 +849,7 @@ impl Documents {
      lsp_client_kind,
      resolver_config_hash: 0,
      imports: Default::default(),
      resolver: CliGraphResolver::default(),
      resolver: Default::default(),
      npm_specifier_reqs: Default::default(),
      has_injected_types_node_package: false,
      specifier_resolver: Arc::new(SpecifierResolver::new(location)),

@@ -1057,7 +1057,7 @@ impl Documents {
    &self,
    specifiers: Vec<String>,
    referrer_doc: &AssetOrDocument,
    maybe_npm_resolver: Option<&NpmPackageResolver>,
    maybe_npm_resolver: Option<&Arc<NpmPackageResolver>>,
  ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
    let referrer = referrer_doc.specifier();
    let dependencies = match referrer_doc {

@@ -1074,7 +1074,7 @@ impl Documents {
        &specifier,
        referrer,
        NodeResolutionMode::Types,
        npm_resolver,
        &npm_resolver.as_require_npm_resolver(),
        &mut PermissionsContainer::allow_all(),
      )
      .ok()

@@ -1166,8 +1166,8 @@ impl Documents {
    maybe_import_map: Option<Arc<import_map::ImportMap>>,
    maybe_config_file: Option<&ConfigFile>,
    maybe_package_json: Option<&PackageJson>,
    npm_registry_api: CliNpmRegistryApi,
    npm_resolution: NpmResolution,
    npm_registry_api: Arc<CliNpmRegistryApi>,
    npm_resolution: Arc<NpmResolution>,
  ) {
    fn calculate_resolver_config_hash(
      enabled_urls: &[Url],

@@ -1218,19 +1218,19 @@ impl Documents {
      maybe_jsx_config.as_ref(),
      maybe_package_json_deps.as_ref(),
    );
    let deps_installer = PackageJsonDepsInstaller::new(
    let deps_installer = Arc::new(PackageJsonDepsInstaller::new(
      npm_registry_api.clone(),
      npm_resolution.clone(),
      maybe_package_json_deps,
    );
    self.resolver = CliGraphResolver::new(
    ));
    self.resolver = Arc::new(CliGraphResolver::new(
      maybe_jsx_config,
      maybe_import_map,
      false,
      npm_registry_api,
      npm_resolution,
      deps_installer,
    );
    ));
    self.imports = Arc::new(
      if let Some(Ok(imports)) =
        maybe_config_file.map(|cf| cf.to_maybe_imports())

@@ -1418,7 +1418,7 @@ impl Documents {
  fn resolve_dependency(
    &self,
    specifier: &ModuleSpecifier,
    maybe_npm_resolver: Option<&NpmPackageResolver>,
    maybe_npm_resolver: Option<&Arc<NpmPackageResolver>>,
  ) -> Option<(ModuleSpecifier, MediaType)> {
    if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
      return node_resolve_npm_req_ref(npm_ref, maybe_npm_resolver);

@@ -1453,7 +1453,7 @@ impl Documents {

fn node_resolve_npm_req_ref(
  npm_req_ref: NpmPackageReqReference,
  maybe_npm_resolver: Option<&NpmPackageResolver>,
  maybe_npm_resolver: Option<&Arc<NpmPackageResolver>>,
) -> Option<(ModuleSpecifier, MediaType)> {
  maybe_npm_resolver.map(|npm_resolver| {
    NodeResolution::into_specifier_and_media_type(

@@ -1864,9 +1864,12 @@ console.log(b, "hello deno");

  #[test]
  fn test_documents_refresh_dependencies_config_change() {
    let npm_registry_api = CliNpmRegistryApi::new_uninitialized();
    let npm_resolution =
      NpmResolution::from_serialized(npm_registry_api.clone(), None, None);
    let npm_registry_api = Arc::new(CliNpmRegistryApi::new_uninitialized());
    let npm_resolution = Arc::new(NpmResolution::from_serialized(
      npm_registry_api.clone(),
      None,
      None,
    ));

    // it should never happen that a user of this API causes this to happen,
    // but we'll guard against it anyway

cli/lsp/language_server.rs
@@ -101,7 +101,7 @@ pub struct StateSnapshot {
  pub cache_metadata: cache::CacheMetadata,
  pub documents: Documents,
  pub maybe_import_map: Option<Arc<ImportMap>>,
  pub maybe_npm_resolver: Option<NpmPackageResolver>,
  pub maybe_npm_resolver: Option<Arc<NpmPackageResolver>>,
}

#[derive(Debug)]

@@ -145,13 +145,13 @@ pub struct Inner {
  /// A lazily create "server" for handling test run requests.
  maybe_testing_server: Option<testing::TestServer>,
  /// Npm's registry api.
  npm_api: CliNpmRegistryApi,
  npm_api: Arc<CliNpmRegistryApi>,
  /// Npm cache
  npm_cache: NpmCache,
  npm_cache: Arc<NpmCache>,
  /// Npm resolution that is stored in memory.
  npm_resolution: NpmResolution,
  npm_resolution: Arc<NpmResolution>,
  /// Resolver for npm packages.
  npm_resolver: NpmPackageResolver,
  npm_resolver: Arc<NpmPackageResolver>,
  /// A collection of measurements which instrument that performance of the LSP.
  performance: Arc<Performance>,
  /// A memoized version of fixable diagnostic codes retrieved from TypeScript.

@@ -182,13 +182,15 @@ impl LanguageServer {
      .into_iter()
      .map(|d| (d.specifier().clone(), d))
      .collect::<HashMap<_, _>>();
    // todo(dsherret): don't use ProcState here
    let ps = ProcState::from_cli_options(Arc::new(cli_options)).await?;
    let mut inner_loader = ps.create_graph_loader();
    let mut inner_loader = ps.module_graph_builder.create_graph_loader();
    let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader {
      inner_loader: &mut inner_loader,
      open_docs: &open_docs,
    };
    let graph = ps
      .module_graph_builder
      .create_graph_with_loader(roots.clone(), &mut loader)
      .await?;
    graph_util::graph_valid(

@@ -418,14 +420,14 @@ fn create_lsp_structs(
  dir: &DenoDir,
  http_client: HttpClient,
) -> (
  CliNpmRegistryApi,
  NpmCache,
  NpmPackageResolver,
  NpmResolution,
  Arc<CliNpmRegistryApi>,
  Arc<NpmCache>,
  Arc<NpmPackageResolver>,
  Arc<NpmResolution>,
) {
  let registry_url = CliNpmRegistryApi::default_url();
  let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
  let npm_cache = NpmCache::from_deno_dir(
  let npm_cache = Arc::new(NpmCache::from_deno_dir(
    dir,
    // Use an "only" cache setting in order to make the
    // user do an explicit "cache" command and prevent

@@ -434,14 +436,15 @@ fn create_lsp_structs(
    CacheSetting::Only,
    http_client.clone(),
    progress_bar.clone(),
  );
  let api = CliNpmRegistryApi::new(
  ));
  let api = Arc::new(CliNpmRegistryApi::new(
    registry_url.clone(),
    npm_cache.clone(),
    http_client,
    progress_bar.clone(),
  );
  let resolution = NpmResolution::from_serialized(api.clone(), None, None);
  ));
  let resolution =
    Arc::new(NpmResolution::from_serialized(api.clone(), None, None));
  let fs_resolver = create_npm_fs_resolver(
    npm_cache.clone(),
    &progress_bar,

@@ -452,7 +455,11 @@ fn create_lsp_structs(
  (
    api,
    npm_cache,
    NpmPackageResolver::new(resolution.clone(), fs_resolver, None),
    Arc::new(NpmPackageResolver::new(
      resolution.clone(),
      fs_resolver,
      None,
    )),
    resolution,
  )
}

@@ -695,12 +702,12 @@ impl Inner {
      maybe_import_map: self.maybe_import_map.clone(),
      maybe_npm_resolver: Some({
        // create a new snapshotted npm resolution and resolver
        let resolution = NpmResolution::new(
        let resolution = Arc::new(NpmResolution::new(
          self.npm_api.clone(),
          self.npm_resolution.snapshot(),
          None,
        );
        NpmPackageResolver::new(
        ));
        Arc::new(NpmPackageResolver::new(
          resolution.clone(),
          create_npm_fs_resolver(
            self.npm_cache.clone(),

@@ -710,7 +717,7 @@ impl Inner {
            None,
          ),
          None,
        )
        ))
      }),
    })
  }

@@ -1130,7 +1137,6 @@ impl Inner {
      self.client.show_message(MessageType::WARNING, err);
    }

    // self.refresh_documents_config(); // todo(THIS PR): REMOVE
    self.assets.intitialize(self.snapshot()).await;

    self.performance.measure(mark);
cli/main.rs (10 changes)
@@ -70,13 +70,17 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
    }
    DenoSubcommand::Cache(cache_flags) => {
      let ps = ProcState::from_flags(flags).await?;
      ps.load_and_type_check_files(&cache_flags.files).await?;
      ps.emitter.cache_module_emits(&ps.graph())?;
      ps.module_load_preparer
        .load_and_type_check_files(&cache_flags.files)
        .await?;
      ps.emitter.cache_module_emits(&ps.graph_container.graph())?;
      Ok(0)
    }
    DenoSubcommand::Check(check_flags) => {
      let ps = ProcState::from_flags(flags).await?;
      ps.load_and_type_check_files(&check_flags.files).await?;
      ps.module_load_preparer
        .load_and_type_check_files(&check_flags.files)
        .await?;
      Ok(0)
    }
    DenoSubcommand::Compile(compile_flags) => {

cli/module_loader.rs
@@ -1,18 +1,44 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::TsConfigType;
use crate::args::TsTypeLib;
use crate::args::TypeCheckMode;
use crate::cache::Caches;
use crate::cache::DenoDir;
use crate::cache::ParsedSourceCache;
use crate::cache::TypeCheckCache;
use crate::emit::Emitter;
use crate::graph_util::graph_lock_or_exit;
use crate::graph_util::graph_valid_with_cli_options;
use crate::graph_util::ModuleGraphBuilder;
use crate::graph_util::ModuleGraphContainer;
use crate::node;
use crate::node::NodeCodeTranslator;
use crate::node::NodeResolution;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmResolution;
use crate::proc_state::CjsResolutionStore;
use crate::proc_state::FileWatcherReporter;
use crate::proc_state::ProcState;
use crate::resolver::CliGraphResolver;
use crate::tools::check;
use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code;

use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt;
use deno_core::futures::Future;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url;
use deno_core::resolve_url_or_path;
use deno_core::ModuleCode;
use deno_core::ModuleLoader;
use deno_core::ModuleSource;
@@ -21,13 +47,210 @@ use deno_core::ModuleType;
use deno_core::OpState;
use deno_core::ResolutionKind;
use deno_core::SourceMapGetter;
use deno_graph::source::Resolver;
use deno_graph::EsmModule;
use deno_graph::JsonModule;
use deno_graph::Module;
use deno_graph::Resolution;
use deno_lockfile::Lockfile;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashSet;
use std::pin::Pin;
use std::rc::Rc;
use std::str;
use std::sync::Arc;

pub struct ModuleLoadPreparer {
  options: Arc<CliOptions>,
  caches: Arc<Caches>,
  deno_dir: DenoDir,
  graph_container: Arc<ModuleGraphContainer>,
  lockfile: Option<Arc<Mutex<Lockfile>>>,
  maybe_file_watcher_reporter: Option<FileWatcherReporter>,
  module_graph_builder: Arc<ModuleGraphBuilder>,
  npm_resolver: Arc<NpmPackageResolver>,
  parsed_source_cache: Arc<ParsedSourceCache>,
  progress_bar: ProgressBar,
  resolver: Arc<CliGraphResolver>,
}

impl ModuleLoadPreparer {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    options: Arc<CliOptions>,
    caches: Arc<Caches>,
    deno_dir: DenoDir,
    graph_container: Arc<ModuleGraphContainer>,
    lockfile: Option<Arc<Mutex<Lockfile>>>,
    maybe_file_watcher_reporter: Option<FileWatcherReporter>,
    module_graph_builder: Arc<ModuleGraphBuilder>,
    npm_resolver: Arc<NpmPackageResolver>,
    parsed_source_cache: Arc<ParsedSourceCache>,
    progress_bar: ProgressBar,
    resolver: Arc<CliGraphResolver>,
  ) -> Self {
    Self {
      options,
      caches,
      deno_dir,
      graph_container,
      lockfile,
      maybe_file_watcher_reporter,
      module_graph_builder,
      npm_resolver,
      parsed_source_cache,
      progress_bar,
      resolver,
    }
  }

  /// This method must be called for a module or a static importer of that
  /// module before attempting to `load()` it from a `JsRuntime`. It will
  /// populate the graph data in memory with the necessary source code, write
  /// emits where necessary or report any module graph / type checking errors.
  #[allow(clippy::too_many_arguments)]
  pub async fn prepare_module_load(
    &self,
    roots: Vec<ModuleSpecifier>,
    is_dynamic: bool,
    lib: TsTypeLib,
    root_permissions: PermissionsContainer,
    dynamic_permissions: PermissionsContainer,
  ) -> Result<(), AnyError> {
    log::debug!("Preparing module load.");
    let _pb_clear_guard = self.progress_bar.clear_guard();

    let mut cache = self
      .module_graph_builder
      .create_fetch_cacher(root_permissions, dynamic_permissions);
    let maybe_imports = self.options.to_maybe_imports()?;
    let graph_resolver = self.resolver.as_graph_resolver();
    let graph_npm_resolver = self.resolver.as_graph_npm_resolver();
    let maybe_file_watcher_reporter: Option<&dyn deno_graph::source::Reporter> =
      if let Some(reporter) = &self.maybe_file_watcher_reporter {
        Some(reporter)
      } else {
        None
      };

    let analyzer = self.parsed_source_cache.as_analyzer();

    log::debug!("Creating module graph.");
    let mut graph_update_permit =
      self.graph_container.acquire_update_permit().await;
    let graph = graph_update_permit.graph_mut();

    // Determine any modules that have already been emitted this session and
    // should be skipped.
    let reload_exclusions: HashSet<ModuleSpecifier> =
      graph.specifiers().map(|(s, _)| s.clone()).collect();

    self
      .module_graph_builder
      .build_graph_with_npm_resolution(
        graph,
        roots.clone(),
        &mut cache,
        deno_graph::BuildOptions {
          is_dynamic,
          imports: maybe_imports,
          resolver: Some(graph_resolver),
          npm_resolver: Some(graph_npm_resolver),
          module_analyzer: Some(&*analyzer),
          reporter: maybe_file_watcher_reporter,
        },
      )
      .await?;

    // If there is a lockfile, validate the integrity of all the modules.
    if let Some(lockfile) = &self.lockfile {
      graph_lock_or_exit(graph, &mut lockfile.lock());
    }

    graph_valid_with_cli_options(graph, &roots, &self.options)?;
    // save the graph and get a reference to the new graph
    let graph = graph_update_permit.commit();

    if graph.has_node_specifier
      && self.options.type_check_mode() != TypeCheckMode::None
    {
      self
        .npm_resolver
        .inject_synthetic_types_node_package()
        .await?;
    }

    drop(_pb_clear_guard);

    // type check if necessary
    if self.options.type_check_mode() != TypeCheckMode::None
      && !self.graph_container.is_type_checked(&roots, lib)
    {
      // todo(dsherret): consolidate this with what's done in graph_util
      log::debug!("Type checking.");
      let maybe_config_specifier = self.options.maybe_config_file_specifier();
      let graph = Arc::new(graph.segment(&roots));
      let options = check::CheckOptions {
        type_check_mode: self.options.type_check_mode(),
        debug: self.options.log_level() == Some(log::Level::Debug),
        maybe_config_specifier,
        ts_config: self
          .options
          .resolve_ts_config_for_emit(TsConfigType::Check { lib })?
          .ts_config,
        log_checks: true,
        reload: self.options.reload_flag()
          && !roots.iter().all(|r| reload_exclusions.contains(r)),
      };
      let check_cache =
        TypeCheckCache::new(self.caches.type_checking_cache_db(&self.deno_dir));
      let check_result =
        check::check(graph, &check_cache, self.npm_resolver.clone(), options)?;
      self.graph_container.set_type_checked(&roots, lib);
      if !check_result.diagnostics.is_empty() {
        return Err(anyhow!(check_result.diagnostics));
      }
      log::debug!("{}", check_result.stats);
    }

    // any updates to the lockfile should be updated now
    if let Some(ref lockfile) = self.lockfile {
      let g = lockfile.lock();
      g.write()?;
    }

    log::debug!("Prepared module load.");

    Ok(())
  }

  /// Helper around prepare_module_load that loads and type checks
  /// the provided files.
  pub async fn load_and_type_check_files(
    &self,
    files: &[String],
  ) -> Result<(), AnyError> {
    let lib = self.options.ts_type_lib_window();

    let specifiers = files
      .iter()
      .map(|file| resolve_url_or_path(file, self.options.initial_cwd()))
      .collect::<Result<Vec<_>, _>>()?;
    self
      .prepare_module_load(
        specifiers,
        false,
        lib,
        PermissionsContainer::allow_all(),
        PermissionsContainer::allow_all(),
      )
      .await
  }
}

struct ModuleCodeSource {
  pub code: ModuleCode,
@@ -36,15 +259,24 @@ struct ModuleCodeSource {
}

pub struct CliModuleLoader {
  pub lib: TsTypeLib,
  lib: TsTypeLib,
  /// The initial set of permissions used to resolve the static imports in the
  /// worker. These are "allow all" for main worker, and parent thread
  /// permissions for Web Worker.
  pub root_permissions: PermissionsContainer,
  root_permissions: PermissionsContainer,
  /// Permissions used to resolve dynamic imports, these get passed as
  /// "root permissions" for Web Worker.
  dynamic_permissions: PermissionsContainer,
  pub ps: ProcState,
  cli_options: Arc<CliOptions>,
  cjs_resolutions: Arc<CjsResolutionStore>,
  emitter: Arc<Emitter>,
  graph_container: Arc<ModuleGraphContainer>,
  module_load_preparer: Arc<ModuleLoadPreparer>,
  node_code_translator: Arc<NodeCodeTranslator>,
  npm_resolution: Arc<NpmResolution>,
  npm_resolver: Arc<NpmPackageResolver>,
  parsed_source_cache: Arc<ParsedSourceCache>,
  resolver: Arc<CliGraphResolver>,
}

impl CliModuleLoader {
@@ -57,7 +289,16 @@ impl CliModuleLoader {
      lib: ps.options.ts_type_lib_window(),
      root_permissions,
      dynamic_permissions,
      ps,
      cli_options: ps.options.clone(),
      cjs_resolutions: ps.cjs_resolutions.clone(),
      emitter: ps.emitter.clone(),
      graph_container: ps.graph_container.clone(),
      module_load_preparer: ps.module_load_preparer.clone(),
      node_code_translator: ps.node_code_translator.clone(),
      npm_resolution: ps.npm_resolution.clone(),
      npm_resolver: ps.npm_resolver.clone(),
      parsed_source_cache: ps.parsed_source_cache.clone(),
      resolver: ps.resolver.clone(),
    })
  }

@@ -70,7 +311,16 @@ impl CliModuleLoader {
      lib: ps.options.ts_type_lib_worker(),
      root_permissions,
      dynamic_permissions,
      ps,
      cli_options: ps.options.clone(),
      cjs_resolutions: ps.cjs_resolutions.clone(),
      emitter: ps.emitter.clone(),
      graph_container: ps.graph_container.clone(),
      module_load_preparer: ps.module_load_preparer.clone(),
      node_code_translator: ps.node_code_translator.clone(),
      npm_resolution: ps.npm_resolution.clone(),
      npm_resolver: ps.npm_resolver.clone(),
      parsed_source_cache: ps.parsed_source_cache.clone(),
      resolver: ps.resolver.clone(),
    })
  }

@@ -83,7 +333,7 @@ impl CliModuleLoader {
      unreachable!(); // Node built-in modules should be handled internally.
    }

    let graph = self.ps.graph();
    let graph = self.graph_container.graph();
    match graph.get(specifier) {
      Some(deno_graph::Module::Json(JsonModule {
        source,
@@ -116,11 +366,9 @@ impl CliModuleLoader {
      | MediaType::Jsx
      | MediaType::Tsx => {
        // get emit text
        self.ps.emitter.emit_parsed_source(
          specifier,
          *media_type,
          source,
        )?
        self
          .emitter
          .emit_parsed_source(specifier, *media_type, source)?
      }
      MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
        panic!("Unexpected media type {media_type} for {specifier}")
@@ -128,7 +376,7 @@ impl CliModuleLoader {
    };

    // at this point, we no longer need the parsed source in memory, so free it
    self.ps.parsed_source_cache.free(specifier);
    self.parsed_source_cache.free(specifier);

    Ok(ModuleCodeSource {
      code,
@@ -152,7 +400,7 @@ impl CliModuleLoader {
    maybe_referrer: Option<&ModuleSpecifier>,
    is_dynamic: bool,
  ) -> Result<ModuleSource, AnyError> {
    let code_source = if self.ps.npm_resolver.in_npm_package(specifier) {
    let code_source = if self.npm_resolver.in_npm_package(specifier) {
      let file_path = specifier.to_file_path().unwrap();
      let code = std::fs::read_to_string(&file_path).with_context(|| {
        let mut msg = "Unable to load ".to_string();
@@ -164,29 +412,24 @@ impl CliModuleLoader {
        msg
      })?;

      let code = if self.ps.cjs_resolutions.lock().contains(specifier) {
      let code = if self.cjs_resolutions.contains(specifier) {
        let mut permissions = if is_dynamic {
          self.dynamic_permissions.clone()
        } else {
          self.root_permissions.clone()
        };
        // translate cjs to esm if it's cjs and inject node globals
        node::translate_cjs_to_esm(
          &self.ps.file_fetcher,
        self.node_code_translator.translate_cjs_to_esm(
          specifier,
          code,
          MediaType::Cjs,
          &self.ps.npm_resolver,
          &self.ps.node_analysis_cache,
          &mut permissions,
        )?
      } else {
        // only inject node globals for esm
        node::esm_code_with_node_globals(
          &self.ps.node_analysis_cache,
          specifier,
          code,
        )?
        self
          .node_code_translator
          .esm_code_with_node_globals(specifier, code)?
      };
      ModuleCodeSource {
        code: code.into(),
@@ -196,7 +439,7 @@ impl CliModuleLoader {
    } else {
      self.load_prepared_module(specifier, maybe_referrer)?
    };
    let code = if self.ps.options.is_inspecting() {
    let code = if self.cli_options.is_inspecting() {
      // we need the code with the source map in order for
      // it to work with --inspect or --inspect-brk
      code_source.code
@@ -215,6 +458,23 @@ impl CliModuleLoader {
      &code_source.found_url,
    ))
  }

  fn handle_node_resolve_result(
    &self,
    result: Result<Option<node::NodeResolution>, AnyError>,
  ) -> Result<ModuleSpecifier, AnyError> {
    let response = match result? {
      Some(response) => response,
      None => return Err(generic_error("not found")),
    };
    if let NodeResolution::CommonJs(specifier) = &response {
      // remember that this was a common js resolution
      self.cjs_resolutions.insert(specifier.clone());
    } else if let NodeResolution::BuiltIn(specifier) = &response {
      return node::resolve_builtin_node_module(specifier);
    }
    Ok(response.into_url())
  }
}

impl ModuleLoader for CliModuleLoader {
@@ -229,7 +489,117 @@ impl ModuleLoader for CliModuleLoader {
    } else {
      self.root_permissions.clone()
    };
    self.ps.resolve(specifier, referrer, &mut permissions)

    // TODO(bartlomieju): ideally we shouldn't need to call `current_dir()` on each
    // call - maybe it should be caller's responsibility to pass it as an arg?
    let cwd = std::env::current_dir().context("Unable to get CWD")?;
    let referrer_result = deno_core::resolve_url_or_path(referrer, &cwd);

    if let Ok(referrer) = referrer_result.as_ref() {
      if self.npm_resolver.in_npm_package(referrer) {
        // we're in an npm package, so use node resolution
        return self
          .handle_node_resolve_result(node::node_resolve(
            specifier,
            referrer,
            NodeResolutionMode::Execution,
            &self.npm_resolver.as_require_npm_resolver(),
            &mut permissions,
          ))
          .with_context(|| {
            format!("Could not resolve '{specifier}' from '{referrer}'.")
          });
      }

      let graph = self.graph_container.graph();
      let maybe_resolved = match graph.get(referrer) {
        Some(Module::Esm(module)) => {
          module.dependencies.get(specifier).map(|d| &d.maybe_code)
        }
        _ => None,
      };

      match maybe_resolved {
        Some(Resolution::Ok(resolved)) => {
          let specifier = &resolved.specifier;

          return match graph.get(specifier) {
            Some(Module::Npm(module)) => self
              .handle_node_resolve_result(node::node_resolve_npm_reference(
                &module.nv_reference,
                NodeResolutionMode::Execution,
                &self.npm_resolver,
                &mut permissions,
              ))
              .with_context(|| {
                format!("Could not resolve '{}'.", module.nv_reference)
              }),
            Some(Module::Node(module)) => {
              node::resolve_builtin_node_module(&module.module_name)
            }
            Some(Module::Esm(module)) => Ok(module.specifier.clone()),
            Some(Module::Json(module)) => Ok(module.specifier.clone()),
            Some(Module::External(module)) => {
              Ok(node::resolve_specifier_into_node_modules(&module.specifier))
            }
            None => Ok(specifier.clone()),
          };
        }
        Some(Resolution::Err(err)) => {
          return Err(custom_error(
            "TypeError",
            format!("{}\n", err.to_string_with_range()),
          ))
        }
        Some(Resolution::None) | None => {}
      }
    }

    // Built-in Node modules
    if let Some(module_name) = specifier.strip_prefix("node:") {
      return node::resolve_builtin_node_module(module_name);
    }

    // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
    // and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
    // but sadly that's not the case due to missing APIs in V8.
    let is_repl =
      matches!(self.cli_options.sub_command(), DenoSubcommand::Repl(_));
    let referrer = if referrer.is_empty() && is_repl {
      deno_core::resolve_path("./$deno$repl.ts", &cwd)?
    } else {
      referrer_result?
    };

    // FIXME(bartlomieju): this is another hack way to provide NPM specifier
    // support in REPL. This should be fixed.
    let resolution = self.resolver.resolve(specifier, &referrer);

    if is_repl {
      let specifier = resolution
        .as_ref()
        .ok()
        .map(Cow::Borrowed)
        .or_else(|| ModuleSpecifier::parse(specifier).ok().map(Cow::Owned));
      if let Some(specifier) = specifier {
        if let Ok(reference) =
          NpmPackageReqReference::from_specifier(&specifier)
        {
          let reference =
            self.npm_resolution.pkg_req_ref_to_nv_ref(reference)?;
          return self
            .handle_node_resolve_result(node::node_resolve_npm_reference(
              &reference,
              deno_runtime::deno_node::NodeResolutionMode::Execution,
              &self.npm_resolver,
              &mut permissions,
            ))
            .with_context(|| format!("Could not resolve '{reference}'."));
        }
      }
    }

    resolution
  }

  fn load(
@@ -255,13 +625,13 @@ impl ModuleLoader for CliModuleLoader {
    _maybe_referrer: Option<String>,
    is_dynamic: bool,
  ) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
    if self.ps.npm_resolver.in_npm_package(specifier) {
    if self.npm_resolver.in_npm_package(specifier) {
      // nothing to prepare
      return Box::pin(deno_core::futures::future::ready(Ok(())));
    }

    let specifier = specifier.clone();
    let ps = self.ps.clone();
    let module_load_preparer = self.module_load_preparer.clone();

    let dynamic_permissions = self.dynamic_permissions.clone();
    let root_permissions = if is_dynamic {
@@ -272,14 +642,15 @@ impl ModuleLoader for CliModuleLoader {
    let lib = self.lib;

    async move {
      ps.prepare_module_load(
        vec![specifier],
        is_dynamic,
        lib,
        root_permissions,
        dynamic_permissions,
      )
      .await
      module_load_preparer
        .prepare_module_load(
          vec![specifier],
          is_dynamic,
          lib,
          root_permissions,
          dynamic_permissions,
        )
        .await
    }
    .boxed_local()
  }
@@ -303,7 +674,7 @@ impl SourceMapGetter for CliModuleLoader {
    file_name: &str,
    line_number: usize,
  ) -> Option<String> {
    let graph = self.ps.graph();
    let graph = self.graph_container.graph();
    let code = match graph.get(&resolve_url(file_name).ok()?) {
      Some(deno_graph::Module::Esm(module)) => &module.source,
      Some(deno_graph::Module::Json(module)) => &module.source,

@@ -1,18 +1,36 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use std::collections::HashSet;
use std::collections::VecDeque;
use std::fmt::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::swc::common::SyntaxContext;
use deno_ast::view::Node;
use deno_ast::view::NodeTrait;
use deno_ast::CjsAnalysis;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource;
use deno_ast::SourceRanged;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_runtime::deno_node::package_exports_resolve;
use deno_runtime::deno_node::NodeModuleKind;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_node::PathClean;
use deno_runtime::deno_node::RealFs;
use deno_runtime::deno_node::RequireNpmResolver;
use deno_runtime::deno_node::NODE_GLOBAL_THIS_NAME;
use std::fmt::Write;
use once_cell::sync::Lazy;

use crate::cache::NodeAnalysisCache;
use crate::file_fetcher::FileFetcher;
use crate::npm::NpmPackageResolver;

static NODE_GLOBALS: &[&str] = &[
  "Buffer",
@ -27,18 +45,287 @@ static NODE_GLOBALS: &[&str] = &[
|
|||
"setTimeout",
|
||||
];
|
||||
|
||||
// TODO(dsherret): this code is way more inefficient than it needs to be.
|
||||
//
|
||||
// In the future, we should disable capturing tokens & scope analysis
|
||||
// and instead only use swc's APIs to go through the portions of the tree
|
||||
// that we know will affect the global scope while still ensuring that
|
||||
// `var` decls are taken into consideration.
|
||||
pub struct NodeCodeTranslator {
|
||||
analysis_cache: NodeAnalysisCache,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
npm_resolver: Arc<NpmPackageResolver>,
|
||||
}
|
||||
|
||||
pub fn esm_code_with_node_globals(
|
||||
impl NodeCodeTranslator {
|
||||
pub fn new(
|
||||
analysis_cache: NodeAnalysisCache,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
npm_resolver: Arc<NpmPackageResolver>,
|
||||
) -> Self {
|
||||
Self {
|
||||
analysis_cache,
|
||||
file_fetcher,
|
||||
npm_resolver,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn esm_code_with_node_globals(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
code: String,
|
||||
) -> Result<String, AnyError> {
|
||||
esm_code_with_node_globals(&self.analysis_cache, specifier, code)
|
||||
}
|
||||
|
||||
/// Translates given CJS module into ESM. This function will perform static
|
||||
/// analysis on the file to find defined exports and reexports.
|
||||
///
|
||||
/// For all discovered reexports the analysis will be performed recursively.
|
||||
///
|
||||
/// If successful a source code for equivalent ES module is returned.
|
||||
pub fn translate_cjs_to_esm(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
code: String,
|
||||
media_type: MediaType,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
) -> Result<String, AnyError> {
|
||||
let mut temp_var_count = 0;
|
||||
let mut handled_reexports: HashSet<String> = HashSet::default();
|
||||
|
||||
let mut source = vec![
|
||||
r#"import {createRequire as __internalCreateRequire} from "node:module";
|
||||
const require = __internalCreateRequire(import.meta.url);"#
|
||||
.to_string(),
|
||||
];
|
||||
|
||||
let analysis =
|
||||
self.perform_cjs_analysis(specifier.as_str(), media_type, code)?;
|
||||
|
||||
let mut all_exports = analysis
|
||||
.exports
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
// (request, referrer)
|
||||
let mut reexports_to_handle = VecDeque::new();
|
||||
for reexport in analysis.reexports {
|
||||
reexports_to_handle.push_back((reexport, specifier.clone()));
|
||||
}
|
||||
|
||||
while let Some((reexport, referrer)) = reexports_to_handle.pop_front() {
|
||||
if handled_reexports.contains(&reexport) {
|
||||
continue;
|
||||
}
|
||||
|
||||
handled_reexports.insert(reexport.to_string());
|
||||
|
||||
// First, resolve relate reexport specifier
|
||||
let resolved_reexport = self.resolve(
|
||||
&reexport,
|
||||
&referrer,
|
||||
// FIXME(bartlomieju): check if these conditions are okay, probably
|
||||
// should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
|
||||
&["deno", "require", "default"],
|
||||
NodeResolutionMode::Execution,
|
||||
permissions,
|
||||
)?;
|
||||
let reexport_specifier =
|
||||
ModuleSpecifier::from_file_path(resolved_reexport).unwrap();
|
||||
// Second, read the source code from disk
|
||||
let reexport_file = self
|
||||
.file_fetcher
|
||||
.get_source(&reexport_specifier)
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Could not find '{}' ({}) referenced from {}",
|
||||
reexport,
|
||||
reexport_specifier,
|
||||
referrer
|
||||
)
|
||||
})?;
|
||||
|
||||
{
|
||||
let analysis = self.perform_cjs_analysis(
|
||||
reexport_specifier.as_str(),
|
||||
reexport_file.media_type,
|
||||
reexport_file.source.to_string(),
|
||||
)?;
|
||||
|
||||
for reexport in analysis.reexports {
|
||||
reexports_to_handle.push_back((reexport, reexport_specifier.clone()));
|
||||
}
|
||||
|
||||
all_exports.extend(
|
||||
analysis
|
||||
.exports
|
||||
.into_iter()
|
||||
.filter(|e| e.as_str() != "default"),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
source.push(format!(
|
||||
"const mod = require(\"{}\");",
|
||||
specifier
|
||||
.to_file_path()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace('\\', "\\\\")
|
||||
.replace('\'', "\\\'")
|
||||
.replace('\"', "\\\"")
|
||||
));
|
||||
|
||||
for export in &all_exports {
|
||||
if export.as_str() != "default" {
|
||||
add_export(
|
||||
&mut source,
|
||||
export,
|
||||
&format!("mod[\"{export}\"]"),
|
||||
&mut temp_var_count,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
source.push("export default mod;".to_string());
|
||||
|
||||
let translated_source = source.join("\n");
|
||||
Ok(translated_source)
|
||||
}
|
||||
|
||||
fn perform_cjs_analysis(
|
||||
&self,
|
||||
specifier: &str,
|
||||
media_type: MediaType,
|
||||
code: String,
|
||||
) -> Result<CjsAnalysis, AnyError> {
|
||||
let source_hash = NodeAnalysisCache::compute_source_hash(&code);
|
||||
if let Some(analysis) = self
|
||||
.analysis_cache
|
||||
.get_cjs_analysis(specifier, &source_hash)
|
||||
{
|
||||
return Ok(analysis);
|
||||
}
|
||||
|
||||
if media_type == MediaType::Json {
|
||||
return Ok(CjsAnalysis {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let parsed_source = deno_ast::parse_script(deno_ast::ParseParams {
|
||||
specifier: specifier.to_string(),
|
||||
text_info: deno_ast::SourceTextInfo::new(code.into()),
|
||||
media_type,
|
||||
capture_tokens: true,
|
||||
scope_analysis: false,
|
||||
maybe_syntax: None,
|
||||
})?;
|
||||
let analysis = parsed_source.analyze_cjs();
|
||||
self
|
||||
.analysis_cache
|
||||
.set_cjs_analysis(specifier, &source_hash, &analysis);
|
||||
|
||||
Ok(analysis)
|
||||
}
|
||||
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
conditions: &[&str],
|
||||
mode: NodeResolutionMode,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
if specifier.starts_with('/') {
|
||||
todo!();
|
||||
}
|
||||
|
||||
let referrer_path = referrer.to_file_path().unwrap();
|
||||
if specifier.starts_with("./") || specifier.starts_with("../") {
|
||||
if let Some(parent) = referrer_path.parent() {
|
||||
return file_extension_probe(parent.join(specifier), &referrer_path);
|
||||
} else {
|
||||
todo!();
|
||||
}
|
||||
}
|
||||
|
||||
// We've got a bare specifier or maybe bare_specifier/blah.js"
|
||||
|
||||
let (package_specifier, package_subpath) =
|
||||
parse_specifier(specifier).unwrap();
|
||||
|
||||
// todo(dsherret): use not_found error on not found here
|
||||
let resolver = self.npm_resolver.as_require_npm_resolver();
|
||||
let module_dir = resolver.resolve_package_folder_from_package(
|
||||
package_specifier.as_str(),
|
||||
&referrer_path,
|
||||
mode,
|
||||
)?;
|
||||
|
||||
let package_json_path = module_dir.join("package.json");
|
||||
if package_json_path.exists() {
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
&self.npm_resolver.as_require_npm_resolver(),
|
||||
permissions,
|
||||
package_json_path.clone(),
|
||||
)?;
|
||||
|
||||
if let Some(exports) = &package_json.exports {
|
||||
return package_exports_resolve::<RealFs>(
|
||||
&package_json_path,
|
||||
package_subpath,
|
||||
exports,
|
||||
referrer,
|
||||
NodeModuleKind::Esm,
|
||||
conditions,
|
||||
mode,
|
||||
&self.npm_resolver.as_require_npm_resolver(),
|
||||
permissions,
|
||||
);
|
||||
}
|
||||
|
||||
// old school: no "exports" map, so resolve via the subpath, "main", or index.js
|
||||
if package_subpath != "." {
|
||||
let d = module_dir.join(package_subpath);
|
||||
if let Ok(m) = d.metadata() {
|
||||
if m.is_dir() {
|
||||
// subdir might have a package.json that specifies the entrypoint
|
||||
let package_json_path = d.join("package.json");
|
||||
if package_json_path.exists() {
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
&self.npm_resolver.as_require_npm_resolver(),
|
||||
permissions,
|
||||
package_json_path,
|
||||
)?;
|
||||
if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
|
||||
return Ok(d.join(main).clean());
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(d.join("index.js").clean());
|
||||
}
|
||||
}
|
||||
return file_extension_probe(d, &referrer_path);
|
||||
} else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
|
||||
return Ok(module_dir.join(main).clean());
|
||||
} else {
|
||||
return Ok(module_dir.join("index.js").clean());
|
||||
}
|
||||
}
|
||||
Err(not_found(specifier, &referrer_path))
|
||||
}
|
||||
}
|
||||
|
||||
fn esm_code_with_node_globals(
|
||||
analysis_cache: &NodeAnalysisCache,
|
||||
specifier: &ModuleSpecifier,
|
||||
code: String,
|
||||
) -> Result<String, AnyError> {
|
||||
// TODO(dsherret): this code is way more inefficient than it needs to be.
|
||||
//
|
||||
// In the future, we should disable capturing tokens & scope analysis
|
||||
// and instead only use swc's APIs to go through the portions of the tree
|
||||
// that we know will affect the global scope while still ensuring that
|
||||
// `var` decls are taken into consideration.
|
||||
let source_hash = NodeAnalysisCache::compute_source_hash(&code);
|
||||
let text_info = deno_ast::SourceTextInfo::from_string(code);
|
||||
let top_level_decls = if let Some(decls) =
|
||||
|
@ -63,6 +350,16 @@ pub fn esm_code_with_node_globals(
|
|||
top_level_decls
|
||||
};
|
||||
|
||||
Ok(esm_code_from_top_level_decls(
|
||||
text_info.text_str(),
|
||||
&top_level_decls,
|
||||
))
|
||||
}
|
||||
|
||||
fn esm_code_from_top_level_decls(
|
||||
file_text: &str,
|
||||
top_level_decls: &HashSet<String>,
|
||||
) -> String {
|
||||
let mut globals = Vec::with_capacity(NODE_GLOBALS.len());
|
||||
let has_global_this = top_level_decls.contains("globalThis");
|
||||
for global in NODE_GLOBALS.iter() {
|
||||
|
@ -83,7 +380,6 @@ pub fn esm_code_with_node_globals(
|
|||
write!(result, "var {global} = {global_this_expr}.{global};").unwrap();
|
||||
}
|
||||
|
||||
let file_text = text_info.text_str();
|
||||
// strip the shebang
|
||||
let file_text = if file_text.starts_with("#!/") {
|
||||
let start_index = file_text.find('\n').unwrap_or(file_text.len());
|
||||
|
@ -93,12 +389,28 @@ pub fn esm_code_with_node_globals(
|
|||
};
|
||||
result.push_str(file_text);
|
||||
|
||||
Ok(result)
|
||||
result
|
||||
}
|
||||
|
||||
fn analyze_top_level_decls(
|
||||
parsed_source: &ParsedSource,
|
||||
) -> Result<HashSet<String>, AnyError> {
|
||||
fn visit_children(
|
||||
node: Node,
|
||||
top_level_context: SyntaxContext,
|
||||
results: &mut HashSet<String>,
|
||||
) {
|
||||
if let Node::Ident(ident) = node {
|
||||
if ident.ctxt() == top_level_context && is_local_declaration_ident(node) {
|
||||
results.insert(ident.sym().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
for child in node.children() {
|
||||
visit_children(child, top_level_context, results);
|
||||
}
|
||||
}
|
||||
|
||||
let top_level_context = parsed_source.top_level_context();
|
||||
|
||||
parsed_source.with_view(|program| {
|
||||
|
@ -108,22 +420,6 @@ fn analyze_top_level_decls(
|
|||
})
|
||||
}
|
||||
|
||||
fn visit_children(
|
||||
node: Node,
|
||||
top_level_context: SyntaxContext,
|
||||
results: &mut HashSet<String>,
|
||||
) {
|
||||
if let Node::Ident(ident) = node {
|
||||
if ident.ctxt() == top_level_context && is_local_declaration_ident(node) {
|
||||
results.insert(ident.sym().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
for child in node.children() {
|
||||
visit_children(child, top_level_context, results);
|
||||
}
|
||||
}
|
||||
|
||||
fn is_local_declaration_ident(node: Node) -> bool {
|
||||
if let Some(parent) = node.parent() {
|
||||
match parent {
|
||||
|
@ -160,6 +456,162 @@ fn is_local_declaration_ident(node: Node) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
|
||||
HashSet::from([
|
||||
"break",
|
||||
"case",
|
||||
"catch",
|
||||
"class",
|
||||
"const",
|
||||
"continue",
|
||||
"debugger",
|
||||
"default",
|
||||
"delete",
|
||||
"do",
|
||||
"else",
|
||||
"export",
|
||||
"extends",
|
||||
"false",
|
||||
"finally",
|
||||
"for",
|
||||
"function",
|
||||
"if",
|
||||
"import",
|
||||
"in",
|
||||
"instanceof",
|
||||
"new",
|
||||
"null",
|
||||
"return",
|
||||
"super",
|
||||
"switch",
|
||||
"this",
|
||||
"throw",
|
||||
"true",
|
||||
"try",
|
||||
"typeof",
|
||||
"var",
|
||||
"void",
|
||||
"while",
|
||||
"with",
|
||||
"yield",
|
||||
"let",
|
||||
"enum",
|
||||
"implements",
|
||||
"interface",
|
||||
"package",
|
||||
"private",
|
||||
"protected",
|
||||
"public",
|
||||
"static",
|
||||
])
|
||||
});
|
||||
|
||||
fn add_export(
|
||||
source: &mut Vec<String>,
|
||||
name: &str,
|
||||
initializer: &str,
|
||||
temp_var_count: &mut usize,
|
||||
) {
|
||||
fn is_valid_var_decl(name: &str) -> bool {
|
||||
// it's ok to be super strict here
|
||||
name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
|
||||
// but it might not be necessary here since our analysis is more detailed?
|
||||
if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) {
|
||||
*temp_var_count += 1;
|
||||
// we can't create an identifier with a reserved word or invalid identifier name,
|
||||
// so assign it to a temporary variable that won't have a conflict, then re-export
|
||||
// it as a string
|
||||
source.push(format!(
|
||||
"const __deno_export_{temp_var_count}__ = {initializer};"
|
||||
));
|
||||
source.push(format!(
|
||||
"export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};"
|
||||
));
|
||||
} else {
|
||||
source.push(format!("export const {name} = {initializer};"));
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_specifier(specifier: &str) -> Option<(String, String)> {
|
||||
let mut separator_index = specifier.find('/');
|
||||
let mut valid_package_name = true;
|
||||
// let mut is_scoped = false;
|
||||
if specifier.is_empty() {
|
||||
valid_package_name = false;
|
||||
} else if specifier.starts_with('@') {
|
||||
// is_scoped = true;
|
||||
if let Some(index) = separator_index {
|
||||
separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1);
|
||||
} else {
|
||||
valid_package_name = false;
|
||||
}
|
||||
}
|
||||
|
||||
let package_name = if let Some(index) = separator_index {
|
||||
specifier[0..index].to_string()
|
||||
} else {
|
||||
specifier.to_string()
|
||||
};
|
||||
|
||||
// Package name cannot have a leading "." and cannot contain percent-encoding or path separators.
|
||||
for ch in package_name.chars() {
|
||||
if ch == '%' || ch == '\\' {
|
||||
valid_package_name = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !valid_package_name {
|
||||
return None;
|
||||
}
|
||||
|
||||
let package_subpath = if let Some(index) = separator_index {
|
||||
format!(".{}", specifier.chars().skip(index).collect::<String>())
|
||||
} else {
|
||||
".".to_string()
|
||||
};
|
||||
|
||||
Some((package_name, package_subpath))
|
||||
}
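As a quick illustration of the splitting rule above (a sketch, not part of the diff; the example package names are made up), bare specifiers divide into a package name and a "."-prefixed subpath, with scoped packages keeping their first slash:

// Expected splits from parse_specifier above, for some hypothetical inputs.
fn main() {
  let expected = [
    ("chalk", ("chalk", ".")),
    ("chalk/package.json", ("chalk", "./package.json")),
    ("@some-package/core", ("@some-package/core", ".")),
    ("@some-package/core/actions", ("@some-package/core", "./actions")),
  ];
  for (input, (name, subpath)) in expected {
    println!("{input} -> package {name}, subpath {subpath}");
  }
}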
|
||||
|
||||
fn file_extension_probe(
|
||||
p: PathBuf,
|
||||
referrer: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let p = p.clean();
|
||||
if p.exists() {
|
||||
let file_name = p.file_name().unwrap();
|
||||
let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if p_js.exists() && p_js.is_file() {
|
||||
return Ok(p_js);
|
||||
} else if p.is_dir() {
|
||||
return Ok(p.join("index.js"));
|
||||
} else {
|
||||
return Ok(p);
|
||||
}
|
||||
} else if let Some(file_name) = p.file_name() {
|
||||
let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if p_js.exists() && p_js.is_file() {
|
||||
return Ok(p_js);
|
||||
}
|
||||
}
|
||||
Err(not_found(&p.to_string_lossy(), referrer))
|
||||
}
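For reference, a sketch (paths are hypothetical) of the probe order that file_extension_probe walks through for a request like require("./util") resolved against /app/index.js:

// Candidates tried, in order, before giving up with a not_found error.
fn main() {
  let candidates = [
    "/app/util.js",       // 1. the path with a `.js` extension appended, if it is a file
    "/app/util/index.js", // 2. `index.js` inside the path, if the path is a directory
    "/app/util",          // 3. the path itself, if it exists as a plain file
  ];
  for (i, c) in candidates.iter().enumerate() {
    println!("probe {}: {c}", i + 1);
  }
}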
|
||||
|
||||
fn not_found(path: &str, referrer: &Path) -> AnyError {
|
||||
let msg = format!(
|
||||
"[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"",
|
||||
path,
|
||||
referrer.to_string_lossy()
|
||||
);
|
||||
std::io::Error::new(std::io::ErrorKind::NotFound, msg).into()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -205,4 +657,34 @@ mod tests {
|
|||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_export() {
|
||||
let mut temp_var_count = 0;
|
||||
let mut source = vec![];
|
||||
|
||||
let exports = vec!["static", "server", "app", "dashed-export"];
|
||||
for export in exports {
|
||||
add_export(&mut source, export, "init", &mut temp_var_count);
|
||||
}
|
||||
assert_eq!(
|
||||
source,
|
||||
vec![
|
||||
"const __deno_export_1__ = init;".to_string(),
|
||||
"export { __deno_export_1__ as \"static\" };".to_string(),
|
||||
"export const server = init;".to_string(),
|
||||
"export const app = init;".to_string(),
|
||||
"const __deno_export_2__ = init;".to_string(),
|
||||
"export { __deno_export_2__ as \"dashed-export\" };".to_string(),
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_specifier() {
|
||||
assert_eq!(
|
||||
parse_specifier("@some-package/core/actions"),
|
||||
Some(("@some-package/core".to_string(), "./actions".to_string()))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
460
cli/node/mod.rs
|
@ -1,14 +1,11 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::collections::VecDeque;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::CjsAnalysis;
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
|
@ -28,23 +25,19 @@ use deno_runtime::deno_node::NodeModuleKind;
|
|||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::deno_node::PathClean;
|
||||
use deno_runtime::deno_node::RealFs;
|
||||
use deno_runtime::deno_node::RequireNpmResolver;
|
||||
use deno_runtime::deno_node::DEFAULT_CONDITIONS;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageNv;
|
||||
use deno_semver::npm::NpmPackageNvReference;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::npm::NpmPackageResolver;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
mod analyze;
|
||||
|
||||
pub use analyze::esm_code_with_node_globals;
|
||||
pub use analyze::NodeCodeTranslator;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum NodeResolution {
|
||||
|
@ -116,56 +109,6 @@ pub fn resolve_builtin_node_module(module_name: &str) -> Result<Url, AnyError> {
|
|||
)))
|
||||
}
|
||||
|
||||
static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
|
||||
HashSet::from([
|
||||
"break",
|
||||
"case",
|
||||
"catch",
|
||||
"class",
|
||||
"const",
|
||||
"continue",
|
||||
"debugger",
|
||||
"default",
|
||||
"delete",
|
||||
"do",
|
||||
"else",
|
||||
"export",
|
||||
"extends",
|
||||
"false",
|
||||
"finally",
|
||||
"for",
|
||||
"function",
|
||||
"if",
|
||||
"import",
|
||||
"in",
|
||||
"instanceof",
|
||||
"new",
|
||||
"null",
|
||||
"return",
|
||||
"super",
|
||||
"switch",
|
||||
"this",
|
||||
"throw",
|
||||
"true",
|
||||
"try",
|
||||
"typeof",
|
||||
"var",
|
||||
"void",
|
||||
"while",
|
||||
"with",
|
||||
"yield",
|
||||
"let",
|
||||
"enum",
|
||||
"implements",
|
||||
"interface",
|
||||
"package",
|
||||
"private",
|
||||
"protected",
|
||||
"public",
|
||||
"static",
|
||||
])
|
||||
});
|
||||
|
||||
/// This function is an implementation of `defaultResolve` in
|
||||
/// `lib/internal/modules/esm/resolve.js` from Node.
|
||||
pub fn node_resolve(
|
||||
|
@ -245,7 +188,7 @@ pub fn node_resolve(
|
|||
pub fn node_resolve_npm_reference(
|
||||
reference: &NpmPackageNvReference,
|
||||
mode: NodeResolutionMode,
|
||||
npm_resolver: &NpmPackageResolver,
|
||||
npm_resolver: &Arc<NpmPackageResolver>,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
) -> Result<Option<NodeResolution>, AnyError> {
|
||||
let package_folder =
|
||||
|
@ -261,7 +204,7 @@ pub fn node_resolve_npm_reference(
|
|||
node_module_kind,
|
||||
DEFAULT_CONDITIONS,
|
||||
mode,
|
||||
npm_resolver,
|
||||
&npm_resolver.as_require_npm_resolver(),
|
||||
permissions,
|
||||
)
|
||||
.with_context(|| {
|
||||
|
@ -282,7 +225,8 @@ pub fn node_resolve_npm_reference(
|
|||
}
|
||||
};
|
||||
let url = ModuleSpecifier::from_file_path(resolved_path).unwrap();
|
||||
let resolve_response = url_to_node_resolution(url, npm_resolver)?;
|
||||
let resolve_response =
|
||||
url_to_node_resolution(url, &npm_resolver.as_require_npm_resolver())?;
|
||||
// TODO(bartlomieju): skipped checking errors for commonJS resolution and
|
||||
// "preserveSymlinksMain"/"preserveSymlinks" options.
|
||||
Ok(Some(resolve_response))
|
||||
|
@ -309,13 +253,13 @@ pub fn resolve_specifier_into_node_modules(
|
|||
|
||||
pub fn node_resolve_binary_commands(
|
||||
pkg_nv: &NpmPackageNv,
|
||||
npm_resolver: &NpmPackageResolver,
|
||||
npm_resolver: &Arc<NpmPackageResolver>,
|
||||
) -> Result<Vec<String>, AnyError> {
|
||||
let package_folder =
|
||||
npm_resolver.resolve_package_folder_from_deno_module(pkg_nv)?;
|
||||
let package_json_path = package_folder.join("package.json");
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
npm_resolver,
|
||||
&npm_resolver.as_require_npm_resolver(),
|
||||
&mut PermissionsContainer::allow_all(),
|
||||
package_json_path,
|
||||
)?;
|
||||
|
@ -332,13 +276,13 @@ pub fn node_resolve_binary_commands(
|
|||
pub fn node_resolve_binary_export(
|
||||
pkg_nv: &NpmPackageNv,
|
||||
bin_name: Option<&str>,
|
||||
npm_resolver: &NpmPackageResolver,
|
||||
npm_resolver: &Arc<NpmPackageResolver>,
|
||||
) -> Result<NodeResolution, AnyError> {
|
||||
let package_folder =
|
||||
npm_resolver.resolve_package_folder_from_deno_module(pkg_nv)?;
|
||||
let package_json_path = package_folder.join("package.json");
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
npm_resolver,
|
||||
&npm_resolver.as_require_npm_resolver(),
|
||||
&mut PermissionsContainer::allow_all(),
|
||||
package_json_path,
|
||||
)?;
|
||||
|
@ -353,7 +297,8 @@ pub fn node_resolve_binary_export(
|
|||
let url =
|
||||
ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap();
|
||||
|
||||
let resolve_response = url_to_node_resolution(url, npm_resolver)?;
|
||||
let resolve_response =
|
||||
url_to_node_resolution(url, &npm_resolver.as_require_npm_resolver())?;
|
||||
// TODO(bartlomieju): skipped checking errors for commonJS resolution and
|
||||
// "preserveSymlinksMain"/"preserveSymlinks" options.
|
||||
Ok(resolve_response)
|
||||
|
@ -600,324 +545,6 @@ fn module_resolve(
|
|||
})
|
||||
}
|
||||
|
||||
fn add_export(
|
||||
source: &mut Vec<String>,
|
||||
name: &str,
|
||||
initializer: &str,
|
||||
temp_var_count: &mut usize,
|
||||
) {
|
||||
fn is_valid_var_decl(name: &str) -> bool {
|
||||
// it's ok to be super strict here
|
||||
name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
|
||||
// but it might not be necessary here since our analysis is more detailed?
|
||||
if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) {
|
||||
*temp_var_count += 1;
|
||||
// we can't create an identifier with a reserved word or invalid identifier name,
|
||||
// so assign it to a temporary variable that won't have a conflict, then re-export
|
||||
// it as a string
|
||||
source.push(format!(
|
||||
"const __deno_export_{temp_var_count}__ = {initializer};"
|
||||
));
|
||||
source.push(format!(
|
||||
"export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};"
|
||||
));
|
||||
} else {
|
||||
source.push(format!("export const {name} = {initializer};"));
|
||||
}
|
||||
}
|
||||
|
||||
/// Translates the given CJS module into ESM. This function will perform static
|
||||
/// analysis on the file to find defined exports and reexports.
|
||||
///
|
||||
/// For all discovered reexports, the analysis will be performed recursively.
|
||||
///
|
||||
/// If successful, the source code of an equivalent ES module is returned.
|
||||
pub fn translate_cjs_to_esm(
|
||||
file_fetcher: &FileFetcher,
|
||||
specifier: &ModuleSpecifier,
|
||||
code: String,
|
||||
media_type: MediaType,
|
||||
npm_resolver: &NpmPackageResolver,
|
||||
node_analysis_cache: &NodeAnalysisCache,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
) -> Result<String, AnyError> {
|
||||
fn perform_cjs_analysis(
|
||||
analysis_cache: &NodeAnalysisCache,
|
||||
specifier: &str,
|
||||
media_type: MediaType,
|
||||
code: String,
|
||||
) -> Result<CjsAnalysis, AnyError> {
|
||||
let source_hash = NodeAnalysisCache::compute_source_hash(&code);
|
||||
if let Some(analysis) =
|
||||
analysis_cache.get_cjs_analysis(specifier, &source_hash)
|
||||
{
|
||||
return Ok(analysis);
|
||||
}
|
||||
|
||||
if media_type == MediaType::Json {
|
||||
return Ok(CjsAnalysis {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let parsed_source = deno_ast::parse_script(deno_ast::ParseParams {
|
||||
specifier: specifier.to_string(),
|
||||
text_info: deno_ast::SourceTextInfo::new(code.into()),
|
||||
media_type,
|
||||
capture_tokens: true,
|
||||
scope_analysis: false,
|
||||
maybe_syntax: None,
|
||||
})?;
|
||||
let analysis = parsed_source.analyze_cjs();
|
||||
analysis_cache.set_cjs_analysis(specifier, &source_hash, &analysis);
|
||||
|
||||
Ok(analysis)
|
||||
}
|
||||
|
||||
let mut temp_var_count = 0;
|
||||
let mut handled_reexports: HashSet<String> = HashSet::default();
|
||||
|
||||
let mut source = vec![
|
||||
r#"import {createRequire as __internalCreateRequire} from "node:module";
|
||||
const require = __internalCreateRequire(import.meta.url);"#
|
||||
.to_string(),
|
||||
];
|
||||
|
||||
let analysis = perform_cjs_analysis(
|
||||
node_analysis_cache,
|
||||
specifier.as_str(),
|
||||
media_type,
|
||||
code,
|
||||
)?;
|
||||
|
||||
let mut all_exports = analysis
|
||||
.exports
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
// (request, referrer)
|
||||
let mut reexports_to_handle = VecDeque::new();
|
||||
for reexport in analysis.reexports {
|
||||
reexports_to_handle.push_back((reexport, specifier.clone()));
|
||||
}
|
||||
|
||||
while let Some((reexport, referrer)) = reexports_to_handle.pop_front() {
|
||||
if handled_reexports.contains(&reexport) {
|
||||
continue;
|
||||
}
|
||||
|
||||
handled_reexports.insert(reexport.to_string());
|
||||
|
||||
// First, resolve the reexport specifier relative to the referrer
|
||||
let resolved_reexport = resolve(
|
||||
&reexport,
|
||||
&referrer,
|
||||
// FIXME(bartlomieju): check if these conditions are okay, probably
|
||||
// should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
|
||||
&["deno", "require", "default"],
|
||||
NodeResolutionMode::Execution,
|
||||
npm_resolver,
|
||||
permissions,
|
||||
)?;
|
||||
let reexport_specifier =
|
||||
ModuleSpecifier::from_file_path(resolved_reexport).unwrap();
|
||||
// Second, read the source code from disk
|
||||
let reexport_file = file_fetcher
|
||||
.get_source(&reexport_specifier)
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Could not find '{}' ({}) referenced from {}",
|
||||
reexport,
|
||||
reexport_specifier,
|
||||
referrer
|
||||
)
|
||||
})?;
|
||||
|
||||
{
|
||||
let analysis = perform_cjs_analysis(
|
||||
node_analysis_cache,
|
||||
reexport_specifier.as_str(),
|
||||
reexport_file.media_type,
|
||||
reexport_file.source.to_string(),
|
||||
)?;
|
||||
|
||||
for reexport in analysis.reexports {
|
||||
reexports_to_handle.push_back((reexport, reexport_specifier.clone()));
|
||||
}
|
||||
|
||||
all_exports.extend(
|
||||
analysis
|
||||
.exports
|
||||
.into_iter()
|
||||
.filter(|e| e.as_str() != "default"),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
source.push(format!(
|
||||
"const mod = require(\"{}\");",
|
||||
specifier
|
||||
.to_file_path()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace('\\', "\\\\")
|
||||
.replace('\'', "\\\'")
|
||||
.replace('\"', "\\\"")
|
||||
));
|
||||
|
||||
for export in &all_exports {
|
||||
if export.as_str() != "default" {
|
||||
add_export(
|
||||
&mut source,
|
||||
export,
|
||||
&format!("mod[\"{export}\"]"),
|
||||
&mut temp_var_count,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
source.push("export default mod;".to_string());
|
||||
|
||||
let translated_source = source.join("\n");
|
||||
Ok(translated_source)
|
||||
}
|
||||
|
||||
fn resolve(
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
conditions: &[&str],
|
||||
mode: NodeResolutionMode,
|
||||
npm_resolver: &dyn RequireNpmResolver,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
if specifier.starts_with('/') {
|
||||
todo!();
|
||||
}
|
||||
|
||||
let referrer_path = referrer.to_file_path().unwrap();
|
||||
if specifier.starts_with("./") || specifier.starts_with("../") {
|
||||
if let Some(parent) = referrer_path.parent() {
|
||||
return file_extension_probe(parent.join(specifier), &referrer_path);
|
||||
} else {
|
||||
todo!();
|
||||
}
|
||||
}
|
||||
|
||||
// We've got a bare specifier or maybe "bare_specifier/blah.js".
|
||||
|
||||
let (package_specifier, package_subpath) =
|
||||
parse_specifier(specifier).unwrap();
|
||||
|
||||
// todo(dsherret): use a not_found error when the package folder is not found here
|
||||
let module_dir = npm_resolver.resolve_package_folder_from_package(
|
||||
package_specifier.as_str(),
|
||||
&referrer_path,
|
||||
mode,
|
||||
)?;
|
||||
|
||||
let package_json_path = module_dir.join("package.json");
|
||||
if package_json_path.exists() {
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
npm_resolver,
|
||||
permissions,
|
||||
package_json_path.clone(),
|
||||
)?;
|
||||
|
||||
if let Some(exports) = &package_json.exports {
|
||||
return package_exports_resolve::<deno_node::RealFs>(
|
||||
&package_json_path,
|
||||
package_subpath,
|
||||
exports,
|
||||
referrer,
|
||||
NodeModuleKind::Esm,
|
||||
conditions,
|
||||
mode,
|
||||
npm_resolver,
|
||||
permissions,
|
||||
);
|
||||
}
|
||||
|
||||
// old school: no "exports" map, so resolve via the subpath, "main", or index.js
|
||||
if package_subpath != "." {
|
||||
let d = module_dir.join(package_subpath);
|
||||
if let Ok(m) = d.metadata() {
|
||||
if m.is_dir() {
|
||||
// subdir might have a package.json that specifies the entrypoint
|
||||
let package_json_path = d.join("package.json");
|
||||
if package_json_path.exists() {
|
||||
let package_json = PackageJson::load::<RealFs>(
|
||||
npm_resolver,
|
||||
permissions,
|
||||
package_json_path,
|
||||
)?;
|
||||
if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
|
||||
return Ok(d.join(main).clean());
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(d.join("index.js").clean());
|
||||
}
|
||||
}
|
||||
return file_extension_probe(d, &referrer_path);
|
||||
} else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
|
||||
return Ok(module_dir.join(main).clean());
|
||||
} else {
|
||||
return Ok(module_dir.join("index.js").clean());
|
||||
}
|
||||
}
|
||||
Err(not_found(specifier, &referrer_path))
|
||||
}
|
||||
|
||||
fn parse_specifier(specifier: &str) -> Option<(String, String)> {
|
||||
let mut separator_index = specifier.find('/');
|
||||
let mut valid_package_name = true;
|
||||
// let mut is_scoped = false;
|
||||
if specifier.is_empty() {
|
||||
valid_package_name = false;
|
||||
} else if specifier.starts_with('@') {
|
||||
// is_scoped = true;
|
||||
if let Some(index) = separator_index {
|
||||
separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1);
|
||||
} else {
|
||||
valid_package_name = false;
|
||||
}
|
||||
}
|
||||
|
||||
let package_name = if let Some(index) = separator_index {
|
||||
specifier[0..index].to_string()
|
||||
} else {
|
||||
specifier.to_string()
|
||||
};
|
||||
|
||||
// Package name cannot have a leading "." and cannot contain percent-encoding or path separators.
|
||||
for ch in package_name.chars() {
|
||||
if ch == '%' || ch == '\\' {
|
||||
valid_package_name = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !valid_package_name {
|
||||
return None;
|
||||
}
|
||||
|
||||
let package_subpath = if let Some(index) = separator_index {
|
||||
format!(".{}", specifier.chars().skip(index).collect::<String>())
|
||||
} else {
|
||||
".".to_string()
|
||||
};
|
||||
|
||||
Some((package_name, package_subpath))
|
||||
}
|
||||
|
||||
fn to_file_path(url: &ModuleSpecifier) -> PathBuf {
|
||||
url
|
||||
.to_file_path()
|
||||
|
@ -958,75 +585,12 @@ fn is_relative_specifier(specifier: &str) -> bool {
|
|||
false
|
||||
}
|
||||
|
||||
fn file_extension_probe(
|
||||
p: PathBuf,
|
||||
referrer: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let p = p.clean();
|
||||
if p.exists() {
|
||||
let file_name = p.file_name().unwrap();
|
||||
let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if p_js.exists() && p_js.is_file() {
|
||||
return Ok(p_js);
|
||||
} else if p.is_dir() {
|
||||
return Ok(p.join("index.js"));
|
||||
} else {
|
||||
return Ok(p);
|
||||
}
|
||||
} else if let Some(file_name) = p.file_name() {
|
||||
let p_js = p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if p_js.exists() && p_js.is_file() {
|
||||
return Ok(p_js);
|
||||
}
|
||||
}
|
||||
Err(not_found(&p.to_string_lossy(), referrer))
|
||||
}
|
||||
|
||||
fn not_found(path: &str, referrer: &Path) -> AnyError {
|
||||
let msg = format!(
|
||||
"[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"",
|
||||
path,
|
||||
referrer.to_string_lossy()
|
||||
);
|
||||
std::io::Error::new(std::io::ErrorKind::NotFound, msg).into()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use deno_core::serde_json::json;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_add_export() {
|
||||
let mut temp_var_count = 0;
|
||||
let mut source = vec![];
|
||||
|
||||
let exports = vec!["static", "server", "app", "dashed-export"];
|
||||
for export in exports {
|
||||
add_export(&mut source, export, "init", &mut temp_var_count);
|
||||
}
|
||||
assert_eq!(
|
||||
source,
|
||||
vec![
|
||||
"const __deno_export_1__ = init;".to_string(),
|
||||
"export { __deno_export_1__ as \"static\" };".to_string(),
|
||||
"export const server = init;".to_string(),
|
||||
"export const app = init;".to_string(),
|
||||
"const __deno_export_2__ = init;".to_string(),
|
||||
"export { __deno_export_2__ as \"dashed-export\" };".to_string(),
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_specifier() {
|
||||
assert_eq!(
|
||||
parse_specifier("@some-package/core/actions"),
|
||||
Some(("@some-package/core".to_string(), "./actions".to_string()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resolve_bin_entry_value() {
|
||||
// should resolve the specified value
|
||||
|
|
|
@ -4,7 +4,6 @@ use std::collections::HashSet;
|
|||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::bail;
|
||||
|
@ -296,14 +295,14 @@ impl ReadonlyNpmCache {
|
|||
}
|
||||
|
||||
/// Stores a single copy of npm packages in a cache.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Debug)]
|
||||
pub struct NpmCache {
|
||||
readonly: ReadonlyNpmCache,
|
||||
cache_setting: CacheSetting,
|
||||
http_client: HttpClient,
|
||||
progress_bar: ProgressBar,
|
||||
/// ensures a package is only downloaded once per run
|
||||
previously_reloaded_packages: Arc<Mutex<HashSet<NpmPackageNv>>>,
|
||||
previously_reloaded_packages: Mutex<HashSet<NpmPackageNv>>,
|
||||
}
|
||||
|
||||
impl NpmCache {
|
||||
|
|
|
@ -19,8 +19,8 @@ use super::NpmResolution;
|
|||
#[derive(Debug)]
|
||||
struct PackageJsonDepsInstallerInner {
|
||||
has_installed_flag: AtomicFlag,
|
||||
npm_registry_api: CliNpmRegistryApi,
|
||||
npm_resolution: NpmResolution,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
package_deps: PackageJsonDeps,
|
||||
}
|
||||
|
||||
|
@ -58,22 +58,20 @@ impl PackageJsonDepsInstallerInner {
|
|||
}
|
||||
|
||||
/// Holds and controls installing dependencies from package.json.
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct PackageJsonDepsInstaller(Option<Arc<PackageJsonDepsInstallerInner>>);
|
||||
#[derive(Debug, Default)]
|
||||
pub struct PackageJsonDepsInstaller(Option<PackageJsonDepsInstallerInner>);
|
||||
|
||||
impl PackageJsonDepsInstaller {
|
||||
pub fn new(
|
||||
npm_registry_api: CliNpmRegistryApi,
|
||||
npm_resolution: NpmResolution,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
deps: Option<PackageJsonDeps>,
|
||||
) -> Self {
|
||||
Self(deps.map(|package_deps| {
|
||||
Arc::new(PackageJsonDepsInstallerInner {
|
||||
has_installed_flag: Default::default(),
|
||||
npm_registry_api,
|
||||
npm_resolution,
|
||||
package_deps,
|
||||
})
|
||||
Self(deps.map(|package_deps| PackageJsonDepsInstallerInner {
|
||||
has_installed_flag: Default::default(),
|
||||
npm_registry_api,
|
||||
npm_resolution,
|
||||
package_deps,
|
||||
}))
|
||||
}
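The change above keeps the installer as an `Option`-wrapped inner struct, so an unconfigured installer is simply a no-op. A small standalone sketch of that shape (names here are illustrative, not the real installer API):

// Option-based "no-op when unconfigured" pattern, as used by the installer above.
#[derive(Default)]
struct DepsInstaller(Option<InstallerInner>);

struct InstallerInner {
  deps: Vec<String>,
}

impl DepsInstaller {
  fn new(deps: Option<Vec<String>>) -> Self {
    Self(deps.map(|deps| InstallerInner { deps }))
  }

  fn ensure_installed(&self) {
    // With no package.json deps configured this is simply a no-op.
    let Some(inner) = &self.0 else { return };
    for dep in &inner.deps {
      println!("installing {dep}");
    }
  }
}

fn main() {
  DepsInstaller::new(None).ensure_installed();
  DepsInstaller::new(Some(vec!["chalk@5".to_string()])).ensure_installed();
}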
|
||||
|
||||
|
|
|
@ -52,7 +52,7 @@ static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
|
|||
Url::parse("https://registry.npmjs.org").unwrap()
|
||||
});
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Debug)]
|
||||
pub struct CliNpmRegistryApi(Option<Arc<CliNpmRegistryApiInner>>);
|
||||
|
||||
impl CliNpmRegistryApi {
|
||||
|
@ -62,7 +62,7 @@ impl CliNpmRegistryApi {
|
|||
|
||||
pub fn new(
|
||||
base_url: Url,
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
http_client: HttpClient,
|
||||
progress_bar: ProgressBar,
|
||||
) -> Self {
|
||||
|
@ -168,7 +168,7 @@ enum CacheItem {
|
|||
#[derive(Debug)]
|
||||
struct CliNpmRegistryApiInner {
|
||||
base_url: Url,
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
force_reload_flag: AtomicFlag,
|
||||
mem_cache: Mutex<HashMap<String, CacheItem>>,
|
||||
previously_reloaded_packages: Mutex<HashSet<String>>,
|
||||
|
|
|
@ -38,11 +38,8 @@ use super::registry::CliNpmRegistryApi;
|
|||
/// based on changes to the resolution.
|
||||
///
|
||||
/// This does not interact with the file system.
|
||||
#[derive(Clone)]
|
||||
pub struct NpmResolution(Arc<NpmResolutionInner>);
|
||||
|
||||
struct NpmResolutionInner {
|
||||
api: CliNpmRegistryApi,
|
||||
pub struct NpmResolution {
|
||||
api: Arc<CliNpmRegistryApi>,
|
||||
snapshot: RwLock<NpmResolutionSnapshot>,
|
||||
update_queue: TaskQueue,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
|
@ -50,7 +47,7 @@ struct NpmResolutionInner {
|
|||
|
||||
impl std::fmt::Debug for NpmResolution {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let snapshot = self.0.snapshot.read();
|
||||
let snapshot = self.snapshot.read();
|
||||
f.debug_struct("NpmResolution")
|
||||
.field("snapshot", &snapshot.as_serialized())
|
||||
.finish()
|
||||
|
@ -59,13 +56,13 @@ impl std::fmt::Debug for NpmResolution {
|
|||
|
||||
impl NpmResolution {
|
||||
pub fn from_serialized(
|
||||
api: CliNpmRegistryApi,
|
||||
api: Arc<CliNpmRegistryApi>,
|
||||
initial_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
) -> Self {
|
||||
let snapshot =
|
||||
NpmResolutionSnapshot::new(NpmResolutionSnapshotCreateOptions {
|
||||
api: Arc::new(api.clone()),
|
||||
api: api.clone(),
|
||||
snapshot: initial_snapshot.unwrap_or_default(),
|
||||
// WARNING: When bumping this version, check if anything needs to be
|
||||
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
|
||||
|
@ -77,35 +74,33 @@ impl NpmResolution {
|
|||
}
|
||||
|
||||
pub fn new(
|
||||
api: CliNpmRegistryApi,
|
||||
api: Arc<CliNpmRegistryApi>,
|
||||
initial_snapshot: NpmResolutionSnapshot,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
) -> Self {
|
||||
Self(Arc::new(NpmResolutionInner {
|
||||
Self {
|
||||
api,
|
||||
snapshot: RwLock::new(initial_snapshot),
|
||||
update_queue: Default::default(),
|
||||
maybe_lockfile,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn add_package_reqs(
|
||||
&self,
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
) -> Result<(), AnyError> {
|
||||
let inner = &self.0;
|
||||
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let _permit = self.update_queue.acquire().await;
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
&self.api,
|
||||
package_reqs,
|
||||
self.0.maybe_lockfile.clone(),
|
||||
|| inner.snapshot.read().clone(),
|
||||
self.maybe_lockfile.clone(),
|
||||
|| self.snapshot.read().clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
*inner.snapshot.write() = snapshot;
|
||||
*self.snapshot.write() = snapshot;
|
||||
Ok(())
|
||||
}
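The update path above follows a pattern worth calling out: an async permit serializes snapshot recomputation while a synchronous RwLock guards reads of the current snapshot. A minimal standalone sketch of that shape, assuming `tokio` (rt + macros) and `parking_lot` as dependencies; the real code uses its own `TaskQueue` rather than a tokio Mutex.

// "One updater at a time, many readers": permit + snapshot swap.
use std::sync::Arc;

use parking_lot::RwLock;
use tokio::sync::Mutex;

#[derive(Default, Clone)]
struct Snapshot {
  packages: Vec<String>,
}

#[derive(Default)]
struct Resolution {
  update_permit: Mutex<()>,
  snapshot: RwLock<Snapshot>,
}

impl Resolution {
  async fn add_package(&self, name: &str) {
    // Only one task may recompute the snapshot at a time.
    let _permit = self.update_permit.lock().await;
    // Recompute from a clone of the current snapshot (stands in for the
    // async resolution work), then swap it in under the write lock.
    let mut next = self.snapshot.read().clone();
    next.packages.push(name.to_string());
    *self.snapshot.write() = next;
  }
}

#[tokio::main]
async fn main() {
  let resolution = Arc::new(Resolution::default());
  resolution.add_package("chalk@5.2.0").await;
  println!("{:?}", resolution.snapshot.read().packages);
}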
|
||||
|
||||
|
@ -113,17 +108,16 @@ impl NpmResolution {
|
|||
&self,
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
) -> Result<(), AnyError> {
|
||||
let inner = &self.0;
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let _permit = self.update_queue.acquire().await;
|
||||
|
||||
let reqs_set = package_reqs.iter().cloned().collect::<HashSet<_>>();
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
&self.api,
|
||||
package_reqs,
|
||||
self.0.maybe_lockfile.clone(),
|
||||
self.maybe_lockfile.clone(),
|
||||
|| {
|
||||
let snapshot = inner.snapshot.read().clone();
|
||||
let snapshot = self.snapshot.read().clone();
|
||||
let has_removed_package = !snapshot
|
||||
.package_reqs()
|
||||
.keys()
|
||||
|
@ -138,25 +132,24 @@ impl NpmResolution {
|
|||
)
|
||||
.await?;
|
||||
|
||||
*inner.snapshot.write() = snapshot;
|
||||
*self.snapshot.write() = snapshot;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn resolve_pending(&self) -> Result<(), AnyError> {
|
||||
let inner = &self.0;
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let _permit = self.update_queue.acquire().await;
|
||||
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
&self.api,
|
||||
Vec::new(),
|
||||
self.0.maybe_lockfile.clone(),
|
||||
|| inner.snapshot.read().clone(),
|
||||
self.maybe_lockfile.clone(),
|
||||
|| self.snapshot.read().clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
*inner.snapshot.write() = snapshot;
|
||||
*self.snapshot.write() = snapshot;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -177,7 +170,6 @@ impl NpmResolution {
|
|||
id: &NpmPackageId,
|
||||
) -> Option<NpmPackageCacheFolderId> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
.read()
|
||||
.package_from_id(id)
|
||||
|
@ -190,7 +182,6 @@ impl NpmResolution {
|
|||
referrer: &NpmPackageCacheFolderId,
|
||||
) -> Result<NpmResolutionPackage, Box<PackageNotFoundFromReferrerError>> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
.read()
|
||||
.resolve_package_from_package(name, referrer)
|
||||
|
@ -203,7 +194,6 @@ impl NpmResolution {
|
|||
req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageId, PackageReqNotFoundError> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
.read()
|
||||
.resolve_pkg_from_pkg_req(req)
|
||||
|
@ -215,7 +205,6 @@ impl NpmResolution {
|
|||
id: &NpmPackageNv,
|
||||
) -> Result<NpmPackageId, PackageNvNotFoundError> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
.read()
|
||||
.resolve_package_from_deno_module(id)
|
||||
|
@ -230,8 +219,7 @@ impl NpmResolution {
|
|||
pkg_req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageNv, NpmPackageVersionResolutionError> {
|
||||
// we should always have this because it should have been cached before here
|
||||
let package_info =
|
||||
self.0.api.get_cached_package_info(&pkg_req.name).unwrap();
|
||||
let package_info = self.api.get_cached_package_info(&pkg_req.name).unwrap();
|
||||
self.resolve_package_req_as_pending_with_info(pkg_req, &package_info)
|
||||
}
|
||||
|
||||
|
@ -244,30 +232,29 @@ impl NpmResolution {
|
|||
package_info: &NpmPackageInfo,
|
||||
) -> Result<NpmPackageNv, NpmPackageVersionResolutionError> {
|
||||
debug_assert_eq!(pkg_req.name, package_info.name);
|
||||
let inner = &self.0;
|
||||
let mut snapshot = inner.snapshot.write();
|
||||
let mut snapshot = self.snapshot.write();
|
||||
let nv = snapshot.resolve_package_req_as_pending(pkg_req, package_info)?;
|
||||
Ok(nv)
|
||||
}
|
||||
|
||||
pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
|
||||
self.0.snapshot.read().all_packages_partitioned()
|
||||
self.snapshot.read().all_packages_partitioned()
|
||||
}
|
||||
|
||||
pub fn has_packages(&self) -> bool {
|
||||
!self.0.snapshot.read().is_empty()
|
||||
!self.snapshot.read().is_empty()
|
||||
}
|
||||
|
||||
pub fn snapshot(&self) -> NpmResolutionSnapshot {
|
||||
self.0.snapshot.read().clone()
|
||||
self.snapshot.read().clone()
|
||||
}
|
||||
|
||||
pub fn serialized_snapshot(&self) -> SerializedNpmResolutionSnapshot {
|
||||
self.0.snapshot.read().as_serialized()
|
||||
self.snapshot.read().as_serialized()
|
||||
}
|
||||
|
||||
pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
|
||||
let snapshot = self.0.snapshot.read();
|
||||
let snapshot = self.snapshot.read();
|
||||
populate_lockfile_from_snapshot(lockfile, &snapshot)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
|
@ -26,11 +27,10 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
/// The local node_modules folder if it is applicable to the implementation.
|
||||
fn node_modules_path(&self) -> Option<PathBuf>;
|
||||
|
||||
fn resolve_package_folder_from_deno_module(
|
||||
fn package_folder(
|
||||
&self,
|
||||
id: &NpmPackageId,
|
||||
package_id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
name: &str,
|
||||
|
@ -43,8 +43,6 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
specifier: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
|
||||
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError>;
|
||||
|
||||
async fn cache_packages(&self) -> Result<(), AnyError>;
|
||||
|
||||
fn ensure_read_permission(
|
||||
|
@ -57,7 +55,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
/// Caches all the packages in parallel.
|
||||
pub async fn cache_packages(
|
||||
mut packages: Vec<NpmResolutionPackage>,
|
||||
cache: &NpmCache,
|
||||
cache: &Arc<NpmCache>,
|
||||
registry_url: &Url,
|
||||
) -> Result<(), AnyError> {
|
||||
let sync_download = should_sync_download();
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
|
@ -25,18 +26,18 @@ use super::common::types_package_name;
|
|||
use super::common::NpmPackageFsResolver;
|
||||
|
||||
/// Resolves packages from the global npm cache.
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug)]
|
||||
pub struct GlobalNpmPackageResolver {
|
||||
cache: NpmCache,
|
||||
resolution: NpmResolution,
|
||||
cache: Arc<NpmCache>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
registry_url: Url,
|
||||
}
|
||||
|
||||
impl GlobalNpmPackageResolver {
|
||||
pub fn new(
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
registry_url: Url,
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
|
@ -45,16 +46,6 @@ impl GlobalNpmPackageResolver {
|
|||
}
|
||||
}
|
||||
|
||||
fn package_folder(&self, id: &NpmPackageId) -> PathBuf {
|
||||
let folder_id = self
|
||||
.resolution
|
||||
.resolve_package_cache_folder_id_from_id(id)
|
||||
.unwrap();
|
||||
self
|
||||
.cache
|
||||
.package_folder_for_id(&folder_id, &self.registry_url)
|
||||
}
|
||||
|
||||
fn resolve_types_package(
|
||||
&self,
|
||||
package_name: &str,
|
||||
|
@ -77,11 +68,16 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
None
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_deno_module(
|
||||
&self,
|
||||
id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
Ok(self.package_folder(id))
|
||||
fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
|
||||
let folder_id = self
|
||||
.resolution
|
||||
.resolve_package_cache_folder_id_from_id(id)
|
||||
.unwrap();
|
||||
Ok(
|
||||
self
|
||||
.cache
|
||||
.package_folder_for_id(&folder_id, &self.registry_url),
|
||||
)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_package(
|
||||
|
@ -106,7 +102,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
.resolution
|
||||
.resolve_package_from_package(name, &referrer_pkg_id)?
|
||||
};
|
||||
Ok(self.package_folder(&pkg.pkg_id))
|
||||
self.package_folder(&pkg.pkg_id)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_specifier(
|
||||
|
@ -124,11 +120,6 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
)
|
||||
}
|
||||
|
||||
fn package_size(&self, id: &NpmPackageId) -> Result<u64, AnyError> {
|
||||
let package_folder = self.package_folder(id);
|
||||
Ok(crate::util::fs::dir_size(&package_folder)?)
|
||||
}
|
||||
|
||||
async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
cache_packages_in_resolver(self).await
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ use std::collections::VecDeque;
|
|||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::util::fs::symlink_dir;
|
||||
use crate::util::fs::LaxSingleProcessFsFlag;
|
||||
|
@ -41,11 +42,11 @@ use super::common::NpmPackageFsResolver;
|
|||
|
||||
/// Resolver that creates a local node_modules directory
|
||||
/// and resolves packages from it.
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug)]
|
||||
pub struct LocalNpmPackageResolver {
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: ProgressBar,
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
registry_url: Url,
|
||||
root_node_modules_path: PathBuf,
|
||||
root_node_modules_url: Url,
|
||||
|
@ -53,11 +54,11 @@ pub struct LocalNpmPackageResolver {
|
|||
|
||||
impl LocalNpmPackageResolver {
|
||||
pub fn new(
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: ProgressBar,
|
||||
registry_url: Url,
|
||||
node_modules_folder: PathBuf,
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
|
@ -103,11 +104,19 @@ impl LocalNpmPackageResolver {
|
|||
// it's within the directory, so use it
|
||||
specifier.to_file_path().ok()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_package_id_folder(
|
||||
&self,
|
||||
id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
#[async_trait]
|
||||
impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
||||
fn root_dir_url(&self) -> &Url {
|
||||
&self.root_node_modules_url
|
||||
}
|
||||
|
||||
fn node_modules_path(&self) -> Option<PathBuf> {
|
||||
Some(self.root_node_modules_path.clone())
|
||||
}
|
||||
|
||||
fn package_folder(&self, id: &NpmPackageId) -> Result<PathBuf, AnyError> {
|
||||
match self.resolution.resolve_package_cache_folder_id_from_id(id) {
|
||||
// package is stored at:
|
||||
// node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
|
||||
|
@ -125,24 +134,6 @@ impl LocalNpmPackageResolver {
|
|||
),
|
||||
}
|
||||
}
|
||||
}
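For orientation, a small sketch of the path shape described in the layout comment above; the cache folder id shown is a simplification, since the real component is derived from the package's cache folder id.

// pnpm-style local node_modules layout: node_modules/.deno/<folder_id>/node_modules/<name>
use std::path::Path;

fn main() {
  let root_node_modules = Path::new("/project/node_modules");
  let cache_folder_id = "chalk@5.2.0"; // assumption: simplified folder id
  let package_name = "chalk";
  let package_dir = root_node_modules
    .join(".deno")
    .join(cache_folder_id)
    .join("node_modules")
    .join(package_name);
  // -> /project/node_modules/.deno/chalk@5.2.0/node_modules/chalk
  println!("{}", package_dir.display());
}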
|
||||
|
||||
#[async_trait]
|
||||
impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
||||
fn root_dir_url(&self) -> &Url {
|
||||
&self.root_node_modules_url
|
||||
}
|
||||
|
||||
fn node_modules_path(&self) -> Option<PathBuf> {
|
||||
Some(self.root_node_modules_path.clone())
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_deno_module(
|
||||
&self,
|
||||
node_id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
self.get_package_id_folder(node_id)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
|
@ -198,12 +189,6 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
Ok(package_root_path)
|
||||
}
|
||||
|
||||
fn package_size(&self, id: &NpmPackageId) -> Result<u64, AnyError> {
|
||||
let package_folder_path = self.get_package_id_folder(id)?;
|
||||
|
||||
Ok(crate::util::fs::dir_size(&package_folder_path)?)
|
||||
}
|
||||
|
||||
async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
sync_resolution_with_fs(
|
||||
&self.resolution.snapshot(),
|
||||
|
@ -231,7 +216,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
/// Creates a pnpm style folder structure.
|
||||
async fn sync_resolution_with_fs(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
cache: &NpmCache,
|
||||
cache: &Arc<NpmCache>,
|
||||
progress_bar: &ProgressBar,
|
||||
registry_url: &Url,
|
||||
root_node_modules_dir_path: &Path,
|
||||
|
|
|
@ -47,10 +47,9 @@ pub struct NpmProcessState {
|
|||
}
|
||||
|
||||
/// Brings together the npm resolution with the file system.
|
||||
#[derive(Clone)]
|
||||
pub struct NpmPackageResolver {
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
}
|
||||
|
||||
|
@ -66,7 +65,7 @@ impl std::fmt::Debug for NpmPackageResolver {
|
|||
|
||||
impl NpmPackageResolver {
|
||||
pub fn new(
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
) -> Self {
|
||||
|
@ -108,9 +107,7 @@ impl NpmPackageResolver {
|
|||
&self,
|
||||
pkg_id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let path = self
|
||||
.fs_resolver
|
||||
.resolve_package_folder_from_deno_module(pkg_id)?;
|
||||
let path = self.fs_resolver.package_folder(pkg_id)?;
|
||||
let path = canonicalize_path_maybe_not_exists(&path)?;
|
||||
log::debug!(
|
||||
"Resolved package folder of {} to {}",
|
||||
|
@ -157,7 +154,8 @@ impl NpmPackageResolver {
|
|||
&self,
|
||||
package_id: &NpmPackageId,
|
||||
) -> Result<u64, AnyError> {
|
||||
self.fs_resolver.package_size(package_id)
|
||||
let package_folder = self.fs_resolver.package_folder(package_id)?;
|
||||
Ok(crate::util::fs::dir_size(&package_folder)?)
|
||||
}
|
||||
|
||||
/// Gets if the provided specifier is in an npm package.
|
||||
|
@ -239,9 +237,17 @@ impl NpmPackageResolver {
|
|||
self.fs_resolver.cache_packages().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn as_require_npm_resolver(
|
||||
self: &Arc<Self>,
|
||||
) -> RequireNpmPackageResolver {
|
||||
RequireNpmPackageResolver(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl RequireNpmResolver for NpmPackageResolver {
|
||||
pub struct RequireNpmPackageResolver(Arc<NpmPackageResolver>);
|
||||
|
||||
impl RequireNpmResolver for RequireNpmPackageResolver {
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
specifier: &str,
|
||||
|
@ -249,7 +255,9 @@ impl RequireNpmResolver for NpmPackageResolver {
|
|||
mode: NodeResolutionMode,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let referrer = path_to_specifier(referrer)?;
|
||||
self.resolve_package_folder_from_package(specifier, &referrer, mode)
|
||||
self
|
||||
.0
|
||||
.resolve_package_folder_from_package(specifier, &referrer, mode)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_path(
|
||||
|
@ -257,7 +265,7 @@ impl RequireNpmResolver for NpmPackageResolver {
|
|||
path: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let specifier = path_to_specifier(path)?;
|
||||
self.resolve_package_folder_from_specifier(&specifier)
|
||||
self.0.resolve_package_folder_from_specifier(&specifier)
|
||||
}
|
||||
|
||||
fn in_npm_package(&self, path: &Path) -> bool {
|
||||
|
@ -267,6 +275,7 @@ impl RequireNpmResolver for NpmPackageResolver {
|
|||
Err(_) => return false,
|
||||
};
|
||||
self
|
||||
.0
|
||||
.resolve_package_folder_from_specifier(&specifier)
|
||||
.is_ok()
|
||||
}
|
||||
|
@ -276,15 +285,15 @@ impl RequireNpmResolver for NpmPackageResolver {
|
|||
permissions: &mut dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
self.fs_resolver.ensure_read_permission(permissions, path)
|
||||
self.0.fs_resolver.ensure_read_permission(permissions, path)
|
||||
}
|
||||
}
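The `RequireNpmPackageResolver` newtype above is one instance of the adapter pattern this refactor leans on: wrap an `Arc` of a service in a thin struct so an externally defined trait can be implemented for it while the service itself stays non-Clone. A generic sketch of that shape (the trait and type names here are illustrative, not the real deno_runtime API):

// Arc-newtype adapter: cloning the adapter only bumps the Arc refcount.
use std::sync::Arc;

// Stand-in for a trait defined in another crate.
trait ExternalResolver {
  fn resolve(&self, specifier: &str) -> String;
}

// The heavy service struct; it is shared via Arc and never cloned itself.
struct PackageService {
  root: String,
}

impl PackageService {
  fn lookup(&self, specifier: &str) -> String {
    format!("{}/{}", self.root, specifier)
  }
}

// Thin adapter around the shared service.
struct ExternalResolverAdapter(Arc<PackageService>);

impl ExternalResolver for ExternalResolverAdapter {
  fn resolve(&self, specifier: &str) -> String {
    self.0.lookup(specifier)
  }
}

fn main() {
  let service = Arc::new(PackageService { root: "/cache".to_string() });
  let adapter = ExternalResolverAdapter(service.clone());
  println!("{}", adapter.resolve("chalk"));
}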
|
||||
|
||||
pub fn create_npm_fs_resolver(
|
||||
cache: NpmCache,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: &ProgressBar,
|
||||
registry_url: Url,
|
||||
resolution: NpmResolution,
|
||||
resolution: Arc<NpmResolution>,
|
||||
maybe_node_modules_path: Option<PathBuf>,
|
||||
) -> Arc<dyn NpmPackageFsResolver> {
|
||||
match maybe_node_modules_path {
|
||||
|
|
|
@ -5,25 +5,19 @@ use crate::args::DenoSubcommand;
|
|||
use crate::args::Flags;
|
||||
use crate::args::Lockfile;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::args::TsTypeLib;
|
||||
use crate::args::TypeCheckMode;
|
||||
use crate::cache;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::cache::EmitCache;
|
||||
use crate::cache::HttpCache;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::cache::TypeCheckCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::graph_util::build_graph_with_npm_resolution;
|
||||
use crate::graph_util::graph_lock_or_exit;
|
||||
use crate::graph_util::graph_valid_with_cli_options;
|
||||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::graph_util::ModuleGraphContainer;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::node;
|
||||
use crate::node::NodeResolution;
|
||||
use crate::module_loader::ModuleLoadPreparer;
|
||||
use crate::node::NodeCodeTranslator;
|
||||
use crate::npm::create_npm_fs_resolver;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::NpmCache;
|
||||
|
@ -31,35 +25,21 @@ use crate::npm::NpmPackageResolver;
|
|||
use crate::npm::NpmResolution;
|
||||
use crate::npm::PackageJsonDepsInstaller;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::tools::check;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_core::CompiledWasmModuleStore;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::SharedArrayBufferStore;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::source::Resolver;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::Resolution;
|
||||
|
||||
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use import_map::ImportMap;
|
||||
use log::warn;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashSet;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
|
@ -73,13 +53,13 @@ pub struct ProcState(Arc<Inner>);
|
|||
|
||||
pub struct Inner {
|
||||
pub dir: DenoDir,
|
||||
pub caches: Caches,
|
||||
pub caches: Arc<Caches>,
|
||||
pub file_fetcher: Arc<FileFetcher>,
|
||||
pub http_client: HttpClient,
|
||||
pub options: Arc<CliOptions>,
|
||||
pub emit_cache: EmitCache,
|
||||
pub emitter: Emitter,
|
||||
graph_container: ModuleGraphContainer,
|
||||
pub emitter: Arc<Emitter>,
|
||||
pub graph_container: Arc<ModuleGraphContainer>,
|
||||
pub lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
pub maybe_import_map: Option<Arc<ImportMap>>,
|
||||
pub maybe_inspector_server: Option<Arc<InspectorServer>>,
|
||||
|
@ -88,16 +68,18 @@ pub struct Inner {
|
|||
pub broadcast_channel: InMemoryBroadcastChannel,
|
||||
pub shared_array_buffer_store: SharedArrayBufferStore,
|
||||
pub compiled_wasm_module_store: CompiledWasmModuleStore,
|
||||
pub parsed_source_cache: ParsedSourceCache,
|
||||
pub parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
pub resolver: Arc<CliGraphResolver>,
|
||||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
pub node_analysis_cache: NodeAnalysisCache,
|
||||
pub npm_api: CliNpmRegistryApi,
|
||||
pub npm_cache: NpmCache,
|
||||
pub npm_resolver: NpmPackageResolver,
|
||||
pub npm_resolution: NpmResolution,
|
||||
pub package_json_deps_installer: PackageJsonDepsInstaller,
|
||||
pub cjs_resolutions: Mutex<HashSet<ModuleSpecifier>>,
|
||||
pub module_graph_builder: Arc<ModuleGraphBuilder>,
|
||||
pub module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
pub node_code_translator: Arc<NodeCodeTranslator>,
|
||||
pub npm_api: Arc<CliNpmRegistryApi>,
|
||||
pub npm_cache: Arc<NpmCache>,
|
||||
pub npm_resolver: Arc<NpmPackageResolver>,
|
||||
pub npm_resolution: Arc<NpmResolution>,
|
||||
pub package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
|
||||
pub cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
progress_bar: ProgressBar,
|
||||
}
|
||||
|
||||
|
@@ -135,6 +117,10 @@ impl ProcState {
/// Reset all runtime state to its default. This should be used on file
/// watcher restarts.
pub fn reset_for_file_watcher(&mut self) {
self.cjs_resolutions.clear();
self.parsed_source_cache.clear();
self.graph_container.clear();

self.0 = Arc::new(Inner {
dir: self.dir.clone(),
caches: self.caches.clone(),

@@ -143,7 +129,7 @@ impl ProcState {
emitter: self.emitter.clone(),
file_fetcher: self.file_fetcher.clone(),
http_client: self.http_client.clone(),
graph_container: Default::default(),
graph_container: self.graph_container.clone(),
lockfile: self.lockfile.clone(),
maybe_import_map: self.maybe_import_map.clone(),
maybe_inspector_server: self.maybe_inspector_server.clone(),

@@ -152,16 +138,18 @@ impl ProcState {
broadcast_channel: Default::default(),
shared_array_buffer_store: Default::default(),
compiled_wasm_module_store: Default::default(),
parsed_source_cache: self.parsed_source_cache.reset_for_file_watcher(),
parsed_source_cache: self.parsed_source_cache.clone(),
resolver: self.resolver.clone(),
maybe_file_watcher_reporter: self.maybe_file_watcher_reporter.clone(),
node_analysis_cache: self.node_analysis_cache.clone(),
module_graph_builder: self.module_graph_builder.clone(),
module_load_preparer: self.module_load_preparer.clone(),
node_code_translator: self.node_code_translator.clone(),
npm_api: self.npm_api.clone(),
npm_cache: self.npm_cache.clone(),
npm_resolver: self.npm_resolver.clone(),
npm_resolution: self.npm_resolution.clone(),
package_json_deps_installer: self.package_json_deps_installer.clone(),
cjs_resolutions: Default::default(),
cjs_resolutions: self.cjs_resolutions.clone(),
progress_bar: self.progress_bar.clone(),
});
self.init_watcher();
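Note: the block below is an illustrative sketch added for context, not part of the diff. With the services shared behind `Arc`, a file-watcher restart can clear mutable state in place (as the `clear()` calls above do) instead of rebuilding every struct. The types are placeholders.

use std::collections::HashSet;
use std::sync::{Arc, Mutex};

// Placeholder for a store with interior mutability, similar in spirit to the
// caches cleared in reset_for_file_watcher.
#[derive(Default)]
struct SpecifierStore(Mutex<HashSet<String>>);

impl SpecifierStore {
  fn insert(&self, specifier: &str) {
    self.0.lock().unwrap().insert(specifier.to_string());
  }
  fn clear(&self) {
    self.0.lock().unwrap().clear();
  }
  fn len(&self) -> usize {
    self.0.lock().unwrap().len()
  }
}

fn main() {
  let store = Arc::new(SpecifierStore::default());
  let shared = store.clone(); // another component holds the same handle
  shared.insert("file:///main.ts");
  // Restart path: the Arc handles stay valid, only the contents are dropped.
  store.clear();
  assert_eq!(shared.len(), 0);
}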
@@ -191,7 +179,7 @@ impl ProcState {
maybe_sender: Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>,
) -> Result<Self, AnyError> {
let dir = cli_options.resolve_deno_dir()?;
let caches = Caches::default();
let caches = Arc::new(Caches::default());
// Warm up the caches we know we'll likely need based on the CLI mode
match cli_options.sub_command() {
DenoSubcommand::Run(_) => {

@@ -230,26 +218,26 @@ impl ProcState {
let lockfile = cli_options.maybe_lock_file();

let npm_registry_url = CliNpmRegistryApi::default_url().to_owned();
let npm_cache = NpmCache::from_deno_dir(
let npm_cache = Arc::new(NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
http_client.clone(),
progress_bar.clone(),
);
let npm_api = CliNpmRegistryApi::new(
));
let npm_api = Arc::new(CliNpmRegistryApi::new(
npm_registry_url.clone(),
npm_cache.clone(),
http_client.clone(),
progress_bar.clone(),
);
));
let npm_snapshot = cli_options
.resolve_npm_resolution_snapshot(&npm_api)
.await?;
let npm_resolution = NpmResolution::from_serialized(
let npm_resolution = Arc::new(NpmResolution::from_serialized(
npm_api.clone(),
npm_snapshot,
lockfile.as_ref().cloned(),
);
));
let npm_fs_resolver = create_npm_fs_resolver(
npm_cache,
&progress_bar,
@@ -257,16 +245,16 @@ impl ProcState {
npm_resolution.clone(),
cli_options.node_modules_dir_path(),
);
let npm_resolver = NpmPackageResolver::new(
let npm_resolver = Arc::new(NpmPackageResolver::new(
npm_resolution.clone(),
npm_fs_resolver,
lockfile.as_ref().cloned(),
);
let package_json_deps_installer = PackageJsonDepsInstaller::new(
));
let package_json_deps_installer = Arc::new(PackageJsonDepsInstaller::new(
npm_api.clone(),
npm_resolution.clone(),
cli_options.maybe_package_json_deps(),
);
));
let maybe_import_map = cli_options
.resolve_import_map(&file_fetcher)
.await?
@@ -296,21 +284,52 @@ impl ProcState {
}
let emit_cache = EmitCache::new(dir.gen_cache.clone());
let parsed_source_cache =
ParsedSourceCache::new(caches.dep_analysis_db(&dir));
Arc::new(ParsedSourceCache::new(caches.dep_analysis_db(&dir)));
let emit_options: deno_ast::EmitOptions = ts_config_result.ts_config.into();
let emitter = Emitter::new(
let emitter = Arc::new(Emitter::new(
emit_cache.clone(),
parsed_source_cache.clone(),
emit_options,
);
let npm_cache = NpmCache::from_deno_dir(
));
let npm_cache = Arc::new(NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
http_client.clone(),
progress_bar.clone(),
);
));
let file_fetcher = Arc::new(file_fetcher);
let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db(&dir));
let node_code_translator = Arc::new(NodeCodeTranslator::new(
node_analysis_cache,
file_fetcher.clone(),
npm_resolver.clone(),
));
let module_graph_builder = Arc::new(ModuleGraphBuilder::new(
cli_options.clone(),
resolver.clone(),
npm_resolver.clone(),
parsed_source_cache.clone(),
lockfile.clone(),
caches.clone(),
emit_cache.clone(),
file_fetcher.clone(),
dir.clone(),
));
let graph_container: Arc<ModuleGraphContainer> = Default::default();
let module_load_preparer = Arc::new(ModuleLoadPreparer::new(
cli_options.clone(),
caches.clone(),
dir.clone(),
graph_container.clone(),
lockfile.clone(),
maybe_file_watcher_reporter.clone(),
module_graph_builder.clone(),
npm_resolver.clone(),
parsed_source_cache.clone(),
progress_bar.clone(),
resolver.clone(),
));

Ok(ProcState(Arc::new(Inner {
dir,
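Note: the block below is an illustrative sketch added for context, not part of the diff. It shows the constructor-injection shape used above for the new service structs (`ModuleGraphBuilder`, `ModuleLoadPreparer`): each service receives only the shared dependencies it needs through `new`, so it can be built without the whole ProcState. Types are placeholders.

use std::sync::Arc;

// Placeholders for injected dependencies; the real constructors above take
// the actual caches, lockfile, resolver, and so on.
struct CachesLike;
struct GraphContainerLike;

#[allow(dead_code)]
struct ModuleLoadPreparerLike {
  caches: Arc<CachesLike>,
  graph_container: Arc<GraphContainerLike>,
}

impl ModuleLoadPreparerLike {
  // Dependencies arrive through the constructor, so tests or other commands
  // can build the service with stubs instead of a full process state.
  fn new(
    caches: Arc<CachesLike>,
    graph_container: Arc<GraphContainerLike>,
  ) -> Self {
    Self { caches, graph_container }
  }
}

fn main() {
  let caches = Arc::new(CachesLike);
  let graph_container = Arc::new(GraphContainerLike);
  let _preparer = Arc::new(ModuleLoadPreparerLike::new(
    caches.clone(),
    graph_container.clone(),
  ));
}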
@@ -318,9 +337,9 @@ impl ProcState {
options: cli_options,
emit_cache,
emitter,
file_fetcher: Arc::new(file_fetcher),
file_fetcher,
http_client,
graph_container: Default::default(),
graph_container,
lockfile,
maybe_import_map,
maybe_inspector_server,
@@ -332,374 +351,40 @@ impl ProcState {
parsed_source_cache,
resolver,
maybe_file_watcher_reporter,
node_analysis_cache,
module_graph_builder,
node_code_translator,
npm_api,
npm_cache,
npm_resolver,
npm_resolution,
package_json_deps_installer,
cjs_resolutions: Default::default(),
module_load_preparer,
progress_bar,
})))
}
}

/// This method must be called for a module or a static importer of that
/// module before attempting to `load()` it from a `JsRuntime`. It will
/// populate `self.graph_data` in memory with the necessary source code, write
/// emits where necessary or report any module graph / type checking errors.
#[allow(clippy::too_many_arguments)]
pub async fn prepare_module_load(
&self,
roots: Vec<ModuleSpecifier>,
is_dynamic: bool,
lib: TsTypeLib,
root_permissions: PermissionsContainer,
dynamic_permissions: PermissionsContainer,
) -> Result<(), AnyError> {
log::debug!("Preparing module load.");
let _pb_clear_guard = self.progress_bar.clear_guard();
/// Keeps track of what module specifiers were resolved as CJS.
#[derive(Default)]
pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);
let mut cache = cache::FetchCacher::new(
self.emit_cache.clone(),
self.file_fetcher.clone(),
self.options.resolve_file_header_overrides(),
root_permissions,
dynamic_permissions,
self.options.node_modules_dir_specifier(),
);
let maybe_imports = self.options.to_maybe_imports()?;
let graph_resolver = self.resolver.as_graph_resolver();
let graph_npm_resolver = self.resolver.as_graph_npm_resolver();
let maybe_file_watcher_reporter: Option<&dyn deno_graph::source::Reporter> =
if let Some(reporter) = &self.maybe_file_watcher_reporter {
Some(reporter)
} else {
None
};

let analyzer = self.parsed_source_cache.as_analyzer();

log::debug!("Creating module graph.");
let mut graph_update_permit =
self.graph_container.acquire_update_permit().await;
let graph = graph_update_permit.graph_mut();

// Determine any modules that have already been emitted this session and
// should be skipped.
let reload_exclusions: HashSet<ModuleSpecifier> =
graph.specifiers().map(|(s, _)| s.clone()).collect();

build_graph_with_npm_resolution(
graph,
&self.resolver,
&self.npm_resolver,
roots.clone(),
&mut cache,
deno_graph::BuildOptions {
is_dynamic,
imports: maybe_imports,
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
reporter: maybe_file_watcher_reporter,
},
)
.await?;

// If there is a lockfile, validate the integrity of all the modules.
if let Some(lockfile) = &self.lockfile {
graph_lock_or_exit(graph, &mut lockfile.lock());
}

graph_valid_with_cli_options(graph, &roots, &self.options)?;
// save the graph and get a reference to the new graph
let graph = graph_update_permit.commit();

if graph.has_node_specifier
&& self.options.type_check_mode() != TypeCheckMode::None
{
self
.npm_resolver
.inject_synthetic_types_node_package()
.await?;
}

drop(_pb_clear_guard);

// type check if necessary
if self.options.type_check_mode() != TypeCheckMode::None
&& !self.graph_container.is_type_checked(&roots, lib)
{
log::debug!("Type checking.");
let maybe_config_specifier = self.options.maybe_config_file_specifier();
let graph = Arc::new(graph.segment(&roots));
let options = check::CheckOptions {
type_check_mode: self.options.type_check_mode(),
debug: self.options.log_level() == Some(log::Level::Debug),
maybe_config_specifier,
ts_config: self
.options
.resolve_ts_config_for_emit(TsConfigType::Check { lib })?
.ts_config,
log_checks: true,
reload: self.options.reload_flag()
&& !roots.iter().all(|r| reload_exclusions.contains(r)),
};
let check_cache =
TypeCheckCache::new(self.caches.type_checking_cache_db(&self.dir));
let check_result =
check::check(graph, &check_cache, &self.npm_resolver, options)?;
self.graph_container.set_type_checked(&roots, lib);
if !check_result.diagnostics.is_empty() {
return Err(anyhow!(check_result.diagnostics));
}
log::debug!("{}", check_result.stats);
}

// any updates to the lockfile should be updated now
if let Some(ref lockfile) = self.lockfile {
let g = lockfile.lock();
g.write()?;
}

log::debug!("Prepared module load.");

Ok(())
impl CjsResolutionStore {
pub fn clear(&self) {
self.0.lock().clear();
}

/// Helper around prepare_module_load that loads and type checks
/// the provided files.
pub async fn load_and_type_check_files(
&self,
files: &[String],
) -> Result<(), AnyError> {
let lib = self.options.ts_type_lib_window();

let specifiers = files
.iter()
.map(|file| resolve_url_or_path(file, self.options.initial_cwd()))
.collect::<Result<Vec<_>, _>>()?;
self
.prepare_module_load(
specifiers,
false,
lib,
PermissionsContainer::allow_all(),
PermissionsContainer::allow_all(),
)
.await
pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
self.0.lock().contains(specifier)
}

fn handle_node_resolve_result(
&self,
result: Result<Option<node::NodeResolution>, AnyError>,
) -> Result<ModuleSpecifier, AnyError> {
let response = match result? {
Some(response) => response,
None => return Err(generic_error("not found")),
};
if let NodeResolution::CommonJs(specifier) = &response {
// remember that this was a common js resolution
self.cjs_resolutions.lock().insert(specifier.clone());
} else if let NodeResolution::BuiltIn(specifier) = &response {
return node::resolve_builtin_node_module(specifier);
}
Ok(response.into_url())
}

pub fn resolve(
&self,
specifier: &str,
referrer: &str,
permissions: &mut PermissionsContainer,
) -> Result<ModuleSpecifier, AnyError> {
// TODO(bartlomieju): ideally we shouldn't need to call `current_dir()` on each
// call - maybe it should be caller's responsibility to pass it as an arg?
let cwd = std::env::current_dir().context("Unable to get CWD")?;
let referrer_result = deno_core::resolve_url_or_path(referrer, &cwd);

if let Ok(referrer) = referrer_result.as_ref() {
if self.npm_resolver.in_npm_package(referrer) {
// we're in an npm package, so use node resolution
return self
.handle_node_resolve_result(node::node_resolve(
specifier,
referrer,
NodeResolutionMode::Execution,
&self.npm_resolver,
permissions,
))
.with_context(|| {
format!("Could not resolve '{specifier}' from '{referrer}'.")
});
}

let graph = self.graph_container.graph();
let maybe_resolved = match graph.get(referrer) {
Some(Module::Esm(module)) => {
module.dependencies.get(specifier).map(|d| &d.maybe_code)
}
_ => None,
};

match maybe_resolved {
Some(Resolution::Ok(resolved)) => {
let specifier = &resolved.specifier;

return match graph.get(specifier) {
Some(Module::Npm(module)) => self
.handle_node_resolve_result(node::node_resolve_npm_reference(
&module.nv_reference,
NodeResolutionMode::Execution,
&self.npm_resolver,
permissions,
))
.with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference)
}),
Some(Module::Node(module)) => {
node::resolve_builtin_node_module(&module.module_name)
}
Some(Module::Esm(module)) => Ok(module.specifier.clone()),
Some(Module::Json(module)) => Ok(module.specifier.clone()),
Some(Module::External(module)) => {
Ok(node::resolve_specifier_into_node_modules(&module.specifier))
}
None => Ok(specifier.clone()),
};
}
Some(Resolution::Err(err)) => {
return Err(custom_error(
"TypeError",
format!("{}\n", err.to_string_with_range()),
))
}
Some(Resolution::None) | None => {}
}
}
// Built-in Node modules
if let Some(module_name) = specifier.strip_prefix("node:") {
return node::resolve_builtin_node_module(module_name);
}

// FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
// and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
// but sadly that's not the case due to missing APIs in V8.
let is_repl = matches!(self.options.sub_command(), DenoSubcommand::Repl(_));
let referrer = if referrer.is_empty() && is_repl {
deno_core::resolve_path("./$deno$repl.ts", &cwd)?
} else {
referrer_result?
};

// FIXME(bartlomieju): this is another hack way to provide NPM specifier
// support in REPL. This should be fixed.
let resolution = self.resolver.resolve(specifier, &referrer);

if is_repl {
let specifier = resolution
.as_ref()
.ok()
.map(Cow::Borrowed)
.or_else(|| ModuleSpecifier::parse(specifier).ok().map(Cow::Owned));
if let Some(specifier) = specifier {
if let Ok(reference) =
NpmPackageReqReference::from_specifier(&specifier)
{
let reference =
self.npm_resolution.pkg_req_ref_to_nv_ref(reference)?;
return self
.handle_node_resolve_result(node::node_resolve_npm_reference(
&reference,
deno_runtime::deno_node::NodeResolutionMode::Execution,
&self.npm_resolver,
permissions,
))
.with_context(|| format!("Could not resolve '{reference}'."));
}
}
}

resolution
}

/// Creates the default loader used for creating a graph.
pub fn create_graph_loader(&self) -> cache::FetchCacher {
cache::FetchCacher::new(
self.emit_cache.clone(),
self.file_fetcher.clone(),
self.options.resolve_file_header_overrides(),
PermissionsContainer::allow_all(),
PermissionsContainer::allow_all(),
self.options.node_modules_dir_specifier(),
)
}
pub async fn create_graph(
&self,
roots: Vec<ModuleSpecifier>,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = self.create_graph_loader();
self.create_graph_with_loader(roots, &mut cache).await
}

pub async fn create_graph_with_loader(
&self,
roots: Vec<ModuleSpecifier>,
loader: &mut dyn Loader,
) -> Result<deno_graph::ModuleGraph, AnyError> {
let maybe_imports = self.options.to_maybe_imports()?;

let cli_resolver = CliGraphResolver::new(
self.options.to_maybe_jsx_import_source_config(),
self.maybe_import_map.clone(),
self.options.no_npm(),
self.npm_api.clone(),
self.npm_resolution.clone(),
self.package_json_deps_installer.clone(),
);
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
let analyzer = self.parsed_source_cache.as_analyzer();

let mut graph = ModuleGraph::default();
build_graph_with_npm_resolution(
&mut graph,
&self.resolver,
&self.npm_resolver,
roots,
loader,
deno_graph::BuildOptions {
is_dynamic: false,
imports: maybe_imports,
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
reporter: None,
},
)
.await?;

if graph.has_node_specifier
&& self.options.type_check_mode() != TypeCheckMode::None
{
self
.npm_resolver
.inject_synthetic_types_node_package()
.await?;
}

Ok(graph)
}

pub fn graph(&self) -> Arc<ModuleGraph> {
self.graph_container.graph()
pub fn insert(&self, specifier: ModuleSpecifier) {
self.0.lock().insert(specifier);
}
}

#[derive(Clone, Debug)]
struct FileWatcherReporter {
pub struct FileWatcherReporter {
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
file_paths: Arc<Mutex<Vec<PathBuf>>>,
}
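Note: the block below is an illustrative sketch added for context, not part of the diff. The now-public `FileWatcherReporter` pairs a channel sender with an accumulating list of file paths; a simplified, std-only version of that shape looks like this (the real reporter uses a tokio unbounded sender and is driven by deno_graph load events).

use std::path::PathBuf;
use std::sync::mpsc;
use std::sync::{Arc, Mutex};

#[derive(Clone)]
struct WatcherReporterLike {
  sender: mpsc::Sender<Vec<PathBuf>>,
  file_paths: Arc<Mutex<Vec<PathBuf>>>,
}

impl WatcherReporterLike {
  fn on_load(&self, path: PathBuf, is_last: bool) {
    let mut paths = self.file_paths.lock().unwrap();
    paths.push(path);
    if is_last {
      // Flush everything collected for this graph build to the watcher.
      self.sender.send(std::mem::take(&mut *paths)).unwrap();
    }
  }
}

fn main() {
  let (sender, receiver) = mpsc::channel();
  let reporter = WatcherReporterLike {
    sender,
    file_paths: Arc::new(Mutex::new(Vec::new())),
  };
  reporter.on_load(PathBuf::from("main.ts"), false);
  reporter.on_load(PathBuf::from("dep.ts"), true);
  assert_eq!(receiver.recv().unwrap().len(), 2);
}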
@@ -27,15 +27,15 @@ use crate::util::sync::AtomicFlag;

/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug, Clone)]
#[derive(Debug)]
pub struct CliGraphResolver {
maybe_import_map: Option<Arc<ImportMap>>,
maybe_default_jsx_import_source: Option<String>,
maybe_jsx_import_source_module: Option<String>,
no_npm: bool,
npm_registry_api: CliNpmRegistryApi,
npm_resolution: NpmResolution,
package_json_deps_installer: PackageJsonDepsInstaller,
npm_registry_api: Arc<CliNpmRegistryApi>,
npm_resolution: Arc<NpmResolution>,
package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
found_package_json_dep_flag: Arc<AtomicFlag>,
sync_download_queue: Option<Arc<TaskQueue>>,
}

@@ -44,9 +44,12 @@ impl Default for CliGraphResolver {
fn default() -> Self {
// This is not ideal, but necessary for the LSP. In the future, we should
// refactor the LSP and force this to be initialized.
let npm_registry_api = CliNpmRegistryApi::new_uninitialized();
let npm_resolution =
NpmResolution::from_serialized(npm_registry_api.clone(), None, None);
let npm_registry_api = Arc::new(CliNpmRegistryApi::new_uninitialized());
let npm_resolution = Arc::new(NpmResolution::from_serialized(
npm_registry_api.clone(),
None,
None,
));
Self {
maybe_import_map: Default::default(),
maybe_default_jsx_import_source: Default::default(),

@@ -66,9 +69,9 @@ impl CliGraphResolver {
maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
maybe_import_map: Option<Arc<ImportMap>>,
no_npm: bool,
npm_registry_api: CliNpmRegistryApi,
npm_resolution: NpmResolution,
package_json_deps_installer: PackageJsonDepsInstaller,
npm_registry_api: Arc<CliNpmRegistryApi>,
npm_resolution: Arc<NpmResolution>,
package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
) -> Self {
Self {
maybe_import_map,
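Note: the block below is an illustrative sketch added for context, not part of the diff. Dropping `Clone` from a service like `CliGraphResolver` and handing out `Arc<CliGraphResolver>` means consumers share one instance (including its flags and queues) rather than copying the struct. The types below are placeholders.

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Placeholder: a resolver-like service that owns a flag, similar in spirit to
// found_package_json_dep_flag above. No Clone derive on the struct itself.
#[derive(Default)]
struct ResolverLike {
  found_dep: AtomicBool,
}

impl ResolverLike {
  fn mark_found(&self) {
    self.found_dep.store(true, Ordering::SeqCst);
  }
  fn was_found(&self) -> bool {
    self.found_dep.load(Ordering::SeqCst)
  }
}

fn main() {
  let resolver = Arc::new(ResolverLike::default());
  let for_graph_builder = resolver.clone(); // same instance, not a copy
  for_graph_builder.mark_found();
  // Only the Arc was cloned, so the flag flip is visible to every holder.
  assert!(resolver.was_found());
}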
@@ -422,14 +422,15 @@ async fn check_specifiers(
specifiers: Vec<ModuleSpecifier>,
) -> Result<(), AnyError> {
let lib = ps.options.ts_type_lib_window();
ps.prepare_module_load(
specifiers,
false,
lib,
PermissionsContainer::allow_all(),
PermissionsContainer::new(permissions),
)
.await?;
ps.module_load_preparer
.prepare_module_load(
specifiers,
false,
lib,
PermissionsContainer::allow_all(),
PermissionsContainer::new(permissions),
)
.await?;

Ok(())
}

@@ -705,7 +706,10 @@ pub async fn run_benchmarks_with_watch(
} else {
bench_modules.clone()
};
let graph = ps.create_graph(bench_modules.clone()).await?;
let graph = ps
.module_graph_builder
.create_graph(bench_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &bench_modules, &ps.options)?;

// TODO(@kitsonk) - This should be totally derivable from the graph.
@@ -13,7 +13,6 @@ use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TsConfigType;
use crate::args::TypeCheckMode;
use crate::graph_util::create_graph_and_maybe_check;
use crate::graph_util::error_for_any_npm_specifier;
use crate::proc_state::ProcState;
use crate::util;

@@ -42,9 +41,10 @@ pub async fn bundle(
async move {
log::debug!(">>>>> bundle START");
let ps = ProcState::from_cli_options(cli_options).await?;
let graph =
create_graph_and_maybe_check(vec![module_specifier.clone()], &ps)
.await?;
let graph = ps
.module_graph_builder
.create_graph_and_maybe_check(vec![module_specifier.clone()])
.await?;

let mut paths_to_watch: Vec<PathBuf> = graph
.specifiers()
@@ -56,7 +56,7 @@ pub struct CheckResult {
pub fn check(
graph: Arc<ModuleGraph>,
cache: &TypeCheckCache,
npm_resolver: &NpmPackageResolver,
npm_resolver: Arc<NpmPackageResolver>,
options: CheckOptions,
) -> Result<CheckResult, AnyError> {
let check_js = options.ts_config.get_check_js();
@@ -697,8 +697,7 @@ pub async fn cover_files(
| MediaType::Mts
| MediaType::Cts
| MediaType::Tsx => {
let source_hash = ps.emitter.get_source_hash(&file.source);
match ps.emit_cache.get_emit_code(&file.specifier, source_hash) {
match ps.emitter.maybed_cached_emit(&file.specifier, &file.source) {
Some(code) => code.into(),
None => {
return Err(anyhow!(
@@ -80,7 +80,10 @@ pub async fn print_docs(
// Save our fake file into file fetcher cache.
ps.file_fetcher.insert_cached(root);

let graph = ps.create_graph(vec![root_specifier.clone()]).await?;
let graph = ps
.module_graph_builder
.create_graph(vec![root_specifier.clone()])
.await?;

if let Some(lockfile) = &ps.lockfile {
graph_lock_or_exit(&graph, &mut lockfile.lock());
@@ -36,9 +36,10 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
let ps = ProcState::from_flags(flags).await?;
if let Some(specifier) = info_flags.file {
let specifier = resolve_url_or_path(&specifier, ps.options.initial_cwd())?;
let mut loader = ps.create_graph_loader();
let mut loader = ps.module_graph_builder.create_graph_loader();
loader.enable_loading_cache_info(); // for displaying the cache information
let graph = ps
.module_graph_builder
.create_graph_with_loader(vec![specifier], &mut loader)
.await?;
@@ -235,6 +235,7 @@ pub async fn install_command(
// ensure the module is cached
ProcState::from_flags(flags.clone())
.await?
.module_load_preparer
.load_and_type_check_files(&[install_flags.module_url.clone()])
.await?;
@@ -4,7 +4,6 @@ use crate::args::CaData;
use crate::args::CompileFlags;
use crate::args::Flags;
use crate::cache::DenoDir;
use crate::graph_util::create_graph_and_maybe_check;
use crate::graph_util::error_for_any_npm_specifier;
use crate::http_util::HttpClient;
use crate::standalone::Metadata;

@@ -56,9 +55,12 @@ pub async fn compile(
)
.await?;

let graph =
Arc::try_unwrap(create_graph_and_maybe_check(module_roots, &ps).await?)
.unwrap();
let graph = Arc::try_unwrap(
ps.module_graph_builder
.create_graph_and_maybe_check(module_roots)
.await?,
)
.unwrap();

// at the moment, we don't support npm specifiers in deno_compile, so show an error
error_for_any_npm_specifier(&graph)?;
@@ -1259,14 +1259,15 @@ pub async fn check_specifiers(
ps.file_fetcher.insert_cached(file);
}

ps.prepare_module_load(
specifiers,
false,
lib,
PermissionsContainer::new(Permissions::allow_all()),
PermissionsContainer::new(permissions.clone()),
)
.await?;
ps.module_load_preparer
.prepare_module_load(
specifiers,
false,
lib,
PermissionsContainer::new(Permissions::allow_all()),
PermissionsContainer::new(permissions.clone()),
)
.await?;
}

let module_specifiers = specifiers

@@ -1280,14 +1281,15 @@ pub async fn check_specifiers(
})
.collect();

ps.prepare_module_load(
module_specifiers,
false,
lib,
PermissionsContainer::allow_all(),
PermissionsContainer::new(permissions),
)
.await?;
ps.module_load_preparer
.prepare_module_load(
module_specifiers,
false,
lib,
PermissionsContainer::allow_all(),
PermissionsContainer::new(permissions),
)
.await?;

Ok(())
}

@@ -1708,7 +1710,10 @@ pub async fn run_tests_with_watch(
} else {
test_modules.clone()
};
let graph = ps.create_graph(test_modules.clone()).await?;
let graph = ps
.module_graph_builder
.create_graph(test_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &test_modules, &ps.options)?;

// TODO(@kitsonk) - This should be totally derivable from the graph.
11
cli/tools/vendor/mod.rs
vendored

@@ -65,7 +65,7 @@ pub async fn vendor(
);
if vendored_count > 0 {
let import_map_path = raw_output_dir.join("import_map.json");
if maybe_update_config_file(&output_dir, &ps) {
if maybe_update_config_file(&output_dir, &ps.options) {
log::info!(
concat!(
"\nUpdated your local Deno configuration file with a reference to the ",

@@ -147,15 +147,14 @@ fn validate_options(
Ok(())
}

fn maybe_update_config_file(output_dir: &Path, ps: &ProcState) -> bool {
fn maybe_update_config_file(output_dir: &Path, options: &CliOptions) -> bool {
assert!(output_dir.is_absolute());
let config_file_specifier = match ps.options.maybe_config_file_specifier() {
let config_file_specifier = match options.maybe_config_file_specifier() {
Some(f) => f,
None => return false,
};

let fmt_config = ps
.options
let fmt_config = options
.maybe_config_file()
.as_ref()
.and_then(|config| config.to_fmt_config().ok())

@@ -271,7 +270,7 @@ async fn create_graph(
.map(|p| resolve_url_or_path(p, ps.options.initial_cwd()))
.collect::<Result<Vec<_>, _>>()?;

ps.create_graph(entry_points).await
ps.module_graph_builder.create_graph(entry_points).await
}

#[cfg(test)]
17
cli/tools/vendor/test.rs
vendored

@@ -263,18 +263,21 @@ async fn build_test_graph(
mut loader: TestLoader,
analyzer: &dyn deno_graph::ModuleAnalyzer,
) -> ModuleGraph {
let resolver = original_import_map.map(|m| {
let npm_registry_api = CliNpmRegistryApi::new_uninitialized();
let npm_resolution =
NpmResolution::from_serialized(npm_registry_api.clone(), None, None);
let deps_installer = PackageJsonDepsInstaller::new(
let resolver = original_import_map.map(|original_import_map| {
let npm_registry_api = Arc::new(CliNpmRegistryApi::new_uninitialized());
let npm_resolution = Arc::new(NpmResolution::from_serialized(
npm_registry_api.clone(),
None,
None,
));
let deps_installer = Arc::new(PackageJsonDepsInstaller::new(
npm_registry_api.clone(),
npm_resolution.clone(),
None,
);
));
CliGraphResolver::new(
None,
Some(Arc::new(m)),
Some(Arc::new(original_import_map)),
false,
npm_registry_api,
npm_resolution,
@@ -307,7 +307,7 @@ pub struct Request {
pub debug: bool,
pub graph: Arc<ModuleGraph>,
pub hash_data: u64,
pub maybe_npm_resolver: Option<NpmPackageResolver>,
pub maybe_npm_resolver: Option<Arc<NpmPackageResolver>>,
pub maybe_tsbuildinfo: Option<String>,
/// A vector of strings that represent the root/entry point modules for the
/// program.

@@ -331,7 +331,7 @@ struct State {
graph: Arc<ModuleGraph>,
maybe_tsbuildinfo: Option<String>,
maybe_response: Option<RespondArgs>,
maybe_npm_resolver: Option<NpmPackageResolver>,
maybe_npm_resolver: Option<Arc<NpmPackageResolver>>,
remapped_specifiers: HashMap<String, ModuleSpecifier>,
root_map: HashMap<String, ModuleSpecifier>,
current_dir: PathBuf,

@@ -341,7 +341,7 @@ impl State {
pub fn new(
graph: Arc<ModuleGraph>,
hash_data: u64,
maybe_npm_resolver: Option<NpmPackageResolver>,
maybe_npm_resolver: Option<Arc<NpmPackageResolver>>,
maybe_tsbuildinfo: Option<String>,
root_map: HashMap<String, ModuleSpecifier>,
remapped_specifiers: HashMap<String, ModuleSpecifier>,

@@ -649,7 +649,11 @@ fn resolve_graph_specifier_types(
let specifier =
node::resolve_specifier_into_node_modules(&module.specifier);
NodeResolution::into_specifier_and_media_type(
node::url_to_node_resolution(specifier, npm_resolver).ok(),
node::url_to_node_resolution(
specifier,
&npm_resolver.as_require_npm_resolver(),
)
.ok(),
)
}))
}

@@ -673,7 +677,7 @@ fn resolve_non_graph_specifier_types(
specifier,
referrer,
NodeResolutionMode::Types,
npm_resolver,
&npm_resolver.as_require_npm_resolver(),
&mut PermissionsContainer::allow_all(),
)
.ok()

@@ -697,7 +701,7 @@ fn resolve_non_graph_specifier_types(

pub fn resolve_npm_package_reference_types(
npm_ref: &NpmPackageNvReference,
npm_resolver: &NpmPackageResolver,
npm_resolver: &Arc<NpmPackageResolver>,
) -> Result<(ModuleSpecifier, MediaType), AnyError> {
let maybe_resolution = node_resolve_npm_reference(
npm_ref,
@@ -184,7 +184,8 @@ impl CliMainWorker {
&mut self,
id: ModuleId,
) -> Result<(), AnyError> {
if self.ps.npm_resolver.has_packages() || self.ps.graph().has_node_specifier
if self.ps.npm_resolver.has_packages()
|| self.ps.graph_container.graph().has_node_specifier
{
self.initialize_main_module_for_node()?;
}

@@ -270,8 +271,10 @@ pub async fn create_custom_worker(
matches!(node_resolution, node::NodeResolution::CommonJs(_));
(node_resolution.into_url(), is_main_cjs)
} else if ps.options.is_npm_main() {
let node_resolution =
node::url_to_node_resolution(main_module, &ps.npm_resolver)?;
let node_resolution = node::url_to_node_resolution(
main_module,
&ps.npm_resolver.as_require_npm_resolver(),
)?;
let is_main_cjs =
matches!(node_resolution, node::NodeResolution::CommonJs(_));
(node_resolution.into_url(), is_main_cjs)

@@ -350,7 +353,7 @@ pub async fn create_custom_worker(
should_break_on_first_statement: ps.options.inspect_brk().is_some(),
should_wait_for_inspector_session: ps.options.inspect_wait().is_some(),
module_loader,
npm_resolver: Some(Rc::new(ps.npm_resolver.clone())),
npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())),
get_error_class_fn: Some(&errors::get_error_class_name),
cache_storage_dir,
origin_storage_dir,

@@ -473,7 +476,7 @@ fn create_web_worker_callback(
format_js_error_fn: Some(Arc::new(format_js_error)),
source_map_getter: Some(Box::new(module_loader.clone())),
module_loader,
npm_resolver: Some(Rc::new(ps.npm_resolver.clone())),
npm_resolver: Some(Rc::new(ps.npm_resolver.as_require_npm_resolver())),
worker_type: args.worker_type,
maybe_inspector_server,
get_error_class_fn: Some(&errors::get_error_class_name),
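Note: the block below is an illustrative sketch added for context, not part of the diff. With `NpmPackageResolver` now behind an `Arc`, the worker and tsc call sites above hand a narrow "require resolver" view to consumers instead of cloning the whole resolver; a generic version of that adapter shape is sketched here, with placeholder names and a made-up trait standing in for the real interface.

use std::sync::Arc;

// Trait standing in for the narrow interface a consumer actually needs.
trait RequireResolverLike {
  fn in_npm_package(&self, specifier: &str) -> bool;
}

// Placeholder for the full service held behind Arc.
struct NpmPackageResolverLike {
  root: String,
}

// Thin adapter that keeps an Arc handle and exposes only the narrow trait.
struct RequireViewLike(Arc<NpmPackageResolverLike>);

impl RequireResolverLike for RequireViewLike {
  fn in_npm_package(&self, specifier: &str) -> bool {
    specifier.starts_with(&self.0.root)
  }
}

impl NpmPackageResolverLike {
  // Associated function so callers can produce views from a shared handle.
  fn as_require_resolver(this: &Arc<Self>) -> RequireViewLike {
    RequireViewLike(this.clone())
  }
}

fn main() {
  let resolver = Arc::new(NpmPackageResolverLike {
    root: "file:///node_modules/".to_string(),
  });
  let view = NpmPackageResolverLike::as_require_resolver(&resolver);
  assert!(view.in_npm_package("file:///node_modules/chalk/index.js"));
}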