diff --git a/Cargo.lock b/Cargo.lock index e0780b2268..8c7b35ca9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1279,9 +1279,9 @@ dependencies = [ [[package]] name = "deno_ast" -version = "0.42.2" +version = "0.43.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2b9d03b1bbeeecdac54367f075d572131736d06c5be3bc49037855bc5ab1bbb" +checksum = "48d00b724e06d2081a141ec1155756a0b465d413d8e2a7515221f61d482eb2ee" dependencies = [ "base64 0.21.7", "deno_media_type", @@ -1356,9 +1356,9 @@ dependencies = [ [[package]] name = "deno_cache_dir" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693ca429aebf945de5fef30df232044f9f80be4cc5a5e7c8d767226c43880f5a" +checksum = "08c1f52170cd7715f8006da54cde1444863a0d6fbd9c11d037a737db2dec8e22" dependencies = [ "base32", "deno_media_type", @@ -1506,9 +1506,9 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.154.0" +version = "0.156.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17e204e45b0d79750880114e37b34abe19ad0710d8435a8da8f23a528fe98de4" +checksum = "2585b98d6ad76dae30bf2d7b6d71b8363cae041158b8780d14a2f4fe17590a61" dependencies = [ "anyhow", "cfg-if", @@ -1606,9 +1606,9 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.83.4" +version = "0.84.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd20bc0780071989c622cbfd5d4fb2e4fd05a247ccd7f791f13c8d2c3792228" +checksum = "cd4f4a14aa069087be41c2998077b0453f0191747898f96e6343f700abfc2c18" dependencies = [ "anyhow", "async-trait", @@ -1726,9 +1726,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.67.0" +version = "0.68.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "871b60e32bfb6c110cbb9b0688dbf048f81e5d347fe4ce5a42239263de9dd938" +checksum = "bb994e6d1b18223df0a756c7948143b35682941d615edffef60d5b38822f38ac" dependencies = [ "anyhow", "deno_ast", @@ -1756,9 +1756,9 @@ dependencies = [ [[package]] name = "deno_media_type" -version = "0.1.4" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8978229b82552bf8457a0125aa20863f023619cfc21ebb007b1e571d68fd85b" +checksum = "7fcf552fbdedbe81c89705349d7d2485c7051382b000dfddbdbf7fc25931cf83" dependencies = [ "data-url", "serde", @@ -2608,9 +2608,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.93.0" +version = "0.93.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9308d98b923b7c0335c2ee1560199e3f2321b1be82803107b4ba4ed5dac46cc" +checksum = "5abfd78fe3cde4f5a6699d65f760c8d44da130cf446b6f80a7a9bc6580e156ab" dependencies = [ "anyhow", "deno_ast", diff --git a/Cargo.toml b/Cargo.toml index 285c077809..f57563e0b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,12 +45,12 @@ license = "MIT" repository = "https://github.com/denoland/deno" [workspace.dependencies] -deno_ast = { version = "=0.42.2", features = ["transpiling"] } +deno_ast = { version = "=0.43.3", features = ["transpiling"] } deno_core = { version = "0.316.0" } deno_bench_util = { version = "0.169.0", path = "./bench_util" } deno_lockfile = "=0.23.1" -deno_media_type = { version = "0.1.4", features = ["module_specifier"] } +deno_media_type = { version = "0.2.0", features = ["module_specifier"] } deno_npm = "=0.25.4" deno_path_util = "=0.2.1" deno_permissions = { version = "0.35.0", path = "./runtime/permissions" } @@ -111,7 +111,7 @@ console_static_text = "=0.8.1" 
dashmap = "5.5.3" data-encoding = "2.3.3" data-url = "=0.3.0" -deno_cache_dir = "=0.13.1" +deno_cache_dir = "=0.13.2" deno_package_json = { version = "0.1.2", default-features = false } dlopen2 = "0.6.1" ecb = "=0.1.2" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 782adfd903..a3e2b71f00 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -72,9 +72,9 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa deno_cache_dir = { workspace = true } deno_config = { version = "=0.37.2", features = ["workspace", "sync"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } -deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] } -deno_graph = { version = "=0.83.4" } -deno_lint = { version = "=0.67.0", features = ["docs"] } +deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] } +deno_graph = { version = "=0.84.1" } +deno_lint = { version = "=0.68.0", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true deno_package_json.workspace = true @@ -107,7 +107,7 @@ dotenvy = "0.15.7" dprint-plugin-json = "=0.19.4" dprint-plugin-jupyter = "=0.1.5" dprint-plugin-markdown = "=0.17.8" -dprint-plugin-typescript = "=0.93.0" +dprint-plugin-typescript = "=0.93.1" env_logger = "=0.10.0" fancy-regex = "=0.10.0" faster-hex.workspace = true diff --git a/cli/args/mod.rs b/cli/args/mod.rs index d725264623..e19025f8b1 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -201,6 +201,8 @@ pub fn ts_config_to_transpile_and_emit_options( precompile_jsx_dynamic_props: None, transform_jsx, var_decl_imports: false, + // todo(dsherret): support verbatim_module_syntax here properly + verbatim_module_syntax: false, }, deno_ast::EmitOptions { inline_sources: options.inline_sources, @@ -1602,6 +1604,15 @@ impl CliOptions { } pub fn use_byonm(&self) -> bool { + if matches!( + self.sub_command(), + DenoSubcommand::Install(_) + | DenoSubcommand::Add(_) + | DenoSubcommand::Remove(_) + ) { + // For `deno install/add/remove` we want to force the managed resolver so it can set up `node_modules/` directory. + return false; + } if self.node_modules_dir().ok().flatten().is_none() && self.maybe_node_modules_folder.is_some() && self diff --git a/cli/cache/cache_db.rs b/cli/cache/cache_db.rs index b24078f29b..329ed2d970 100644 --- a/cli/cache/cache_db.rs +++ b/cli/cache/cache_db.rs @@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash { } /// What should the cache should do on failure? -#[derive(Default)] +#[derive(Debug, Default)] pub enum CacheFailure { /// Return errors if failure mode otherwise unspecified. #[default] @@ -69,6 +69,7 @@ pub enum CacheFailure { } /// Configuration SQL and other parameters for a [`CacheDB`]. +#[derive(Debug)] pub struct CacheDBConfiguration { /// SQL to run for a new database. pub table_initializer: &'static str, @@ -98,6 +99,7 @@ impl CacheDBConfiguration { } } +#[derive(Debug)] enum ConnectionState { Connected(Connection), Blackhole, @@ -106,7 +108,7 @@ enum ConnectionState { /// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// from blocking the main thread. 
-#[derive(Clone)] +#[derive(Debug, Clone)] pub struct CacheDB { // TODO(mmastrac): We can probably simplify our thread-safe implementation here conn: Arc>>, diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index 74e1c1101b..3c9eecfcbd 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -10,6 +10,7 @@ use deno_core::unsync::sync::AtomicFlag; use super::DiskCache; /// The cache that stores previously emitted files. +#[derive(Debug)] pub struct EmitCache { disk_cache: DiskCache, emit_failed_flag: AtomicFlag, @@ -91,6 +92,7 @@ impl EmitCache { const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata="; +#[derive(Debug)] struct EmitFileSerializer { cli_version: &'static str, } diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index bf8f1b1f0b..50fc135ddf 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -8,14 +8,9 @@ use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileOrRedirect; -use crate::npm::CliNpmResolver; -use crate::resolver::CliNodeResolver; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::AtomicWriteFileFsAdapter; -use crate::util::path::specifier_has_extension; -use crate::util::text_encoding::arc_str_to_bytes; -use crate::util::text_encoding::from_utf8_lossy_owned; use deno_ast::MediaType; use deno_core::futures; @@ -25,7 +20,9 @@ use deno_graph::source::CacheInfo; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; +use deno_runtime::deno_fs; use deno_runtime::deno_permissions::PermissionsContainer; +use node_resolver::InNpmPackageChecker; use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; @@ -60,7 +57,6 @@ pub use fast_check::FastCheckCache; pub use incremental::IncrementalCache; pub use module_info::ModuleInfoCache; pub use node::NodeAnalysisCache; -pub use parsed_source::EsmOrCjsChecker; pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::ParsedSourceCache; @@ -181,46 +177,40 @@ pub struct FetchCacherOptions { pub permissions: PermissionsContainer, /// If we're publishing for `deno publish`. pub is_deno_publish: bool, - pub unstable_detect_cjs: bool, } /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// a concise interface to the DENO_DIR when building module graphs. 
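Aside (editor's sketch): `FetchCacher` now takes an `InNpmPackageChecker` instead of the full npm and node resolvers, since all it needs here is a yes/no answer to "is this specifier inside an npm package?". Below is a hypothetical prefix-based checker, assuming the `url` crate; the real trait in the `node_resolver` crate is more involved.

use url::Url;

// Hypothetical checker: treats anything under a known npm cache root or a
// node_modules directory as "inside an npm package".
struct PrefixNpmPackageChecker {
    npm_cache_root: Url,
}

impl PrefixNpmPackageChecker {
    fn in_npm_package(&self, specifier: &Url) -> bool {
        specifier.as_str().starts_with(self.npm_cache_root.as_str())
            || (specifier.scheme() == "file"
                && specifier.path().contains("/node_modules/"))
    }
}

fn main() {
    let checker = PrefixNpmPackageChecker {
        npm_cache_root: Url::parse("file:///home/user/.cache/deno/npm/").unwrap(),
    };
    let inside = Url::parse("file:///project/node_modules/chalk/index.js").unwrap();
    let outside = Url::parse("file:///project/main.ts").unwrap();
    assert!(checker.in_npm_package(&inside));
    assert!(!checker.in_npm_package(&outside));
}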
pub struct FetchCacher { pub file_header_overrides: HashMap>, - esm_or_cjs_checker: Arc, file_fetcher: Arc, + fs: Arc, global_http_cache: Arc, - node_resolver: Arc, - npm_resolver: Arc, + in_npm_pkg_checker: Arc, module_info_cache: Arc, permissions: PermissionsContainer, is_deno_publish: bool, - unstable_detect_cjs: bool, cache_info_enabled: bool, } impl FetchCacher { pub fn new( - esm_or_cjs_checker: Arc, file_fetcher: Arc, + fs: Arc, global_http_cache: Arc, - node_resolver: Arc, - npm_resolver: Arc, + in_npm_pkg_checker: Arc, module_info_cache: Arc, options: FetchCacherOptions, ) -> Self { Self { file_fetcher, - esm_or_cjs_checker, + fs, global_http_cache, - node_resolver, - npm_resolver, + in_npm_pkg_checker, module_info_cache, file_header_overrides: options.file_header_overrides, permissions: options.permissions, is_deno_publish: options.is_deno_publish, - unstable_detect_cjs: options.unstable_detect_cjs, cache_info_enabled: false, } } @@ -271,70 +261,23 @@ impl Loader for FetchCacher { ) -> LoadFuture { use deno_graph::source::CacheSetting as LoaderCacheSetting; - if specifier.scheme() == "file" { - if specifier.path().contains("/node_modules/") { - // The specifier might be in a completely different symlinked tree than - // what the node_modules url is in (ex. `/my-project-1/node_modules` - // symlinked to `/my-project-2/node_modules`), so first we checked if the path - // is in a node_modules dir to avoid needlessly canonicalizing, then now compare - // against the canonicalized specifier. - let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.npm_resolver.in_npm_package(&specifier) { - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { specifier }, - )))); - } - } - - // make local CJS modules external to the graph - if specifier_has_extension(specifier, "cjs") { + if specifier.scheme() == "file" + && specifier.path().contains("/node_modules/") + { + // The specifier might be in a completely different symlinked tree than + // what the node_modules url is in (ex. `/my-project-1/node_modules` + // symlinked to `/my-project-2/node_modules`), so first we checked if the path + // is in a node_modules dir to avoid needlessly canonicalizing, then now compare + // against the canonicalized specifier. 
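Aside (editor's sketch): the pattern described by the comment above, in standalone form. The cheap substring test runs first so canonicalization, which touches the file system, only happens for paths that already mention node_modules; the `is_in_npm_cache` closure is an invented stand-in for the real package checker.

use std::path::{Path, PathBuf};

// Returns Some(canonical path) when the module should be treated as external.
fn resolve_external(
    path: &Path,
    is_in_npm_cache: impl Fn(&Path) -> bool,
) -> Option<PathBuf> {
    if !path.to_string_lossy().contains("/node_modules/") {
        return None;
    }
    // Follow symlinks so a node_modules tree reached through a link is
    // compared against its real location.
    let canonical =
        std::fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf());
    if is_in_npm_cache(&canonical) {
        Some(canonical)
    } else {
        None
    }
}

fn main() {
    let hit = resolve_external(
        Path::new("/tmp/missing/node_modules/pkg/index.js"),
        |p| p.starts_with("/tmp"),
    );
    println!("treated as external: {}", hit.is_some());
}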
+ let specifier = crate::node::resolve_specifier_into_node_modules( + specifier, + self.fs.as_ref(), + ); + if self.in_npm_pkg_checker.in_npm_package(&specifier) { return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { - specifier: specifier.clone(), - }, + LoadResponse::External { specifier }, )))); } - - if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") { - if let Ok(Some(pkg_json)) = - self.node_resolver.get_closest_package_json(specifier) - { - if pkg_json.typ == "commonjs" { - if let Ok(path) = specifier.to_file_path() { - if let Ok(bytes) = std::fs::read(&path) { - let text: Arc = from_utf8_lossy_owned(bytes).into(); - let is_es_module = match self.esm_or_cjs_checker.is_esm( - specifier, - text.clone(), - MediaType::JavaScript, - ) { - Ok(value) => value, - Err(err) => { - return Box::pin(futures::future::ready(Err(err.into()))); - } - }; - if !is_es_module { - self.node_resolver.mark_cjs_resolution(specifier.clone()); - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { - specifier: specifier.clone(), - }, - )))); - } else { - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::Module { - specifier: specifier.clone(), - content: arc_str_to_bytes(text), - maybe_headers: None, - }, - )))); - } - } - } - } - } - } } if self.is_deno_publish diff --git a/cli/cache/module_info.rs b/cli/cache/module_info.rs index 4dbb01c37b..060a6f4f0c 100644 --- a/cli/cache/module_info.rs +++ b/cli/cache/module_info.rs @@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration { /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// performance improvement because when it exists we can skip parsing a module for /// deno_graph. +#[derive(Debug)] pub struct ModuleInfoCache { conn: CacheDB, + parsed_source_cache: Arc, } impl ModuleInfoCache { #[cfg(test)] - pub fn new_in_memory(version: &'static str) -> Self { - Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) + pub fn new_in_memory( + version: &'static str, + parsed_source_cache: Arc, + ) -> Self { + Self::new( + CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version), + parsed_source_cache, + ) } - pub fn new(conn: CacheDB) -> Self { - Self { conn } + pub fn new( + conn: CacheDB, + parsed_source_cache: Arc, + ) -> Self { + Self { + conn, + parsed_source_cache, + } } /// Useful for testing: re-create this cache DB with a different current version. 
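Aside (editor's sketch): `ModuleInfoCache` now owns the `ParsedSourceCache` and follows a look-up-else-parse-then-store pattern (see `analyze_sync` below). Here is a simplified in-memory version keyed by a source hash, with made-up types; the real cache also keys on specifier and media type and persists to SQLite.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Stand-in for deno_graph::ModuleInfo: just the number of imports here.
type ModuleInfo = usize;

struct InfoCache {
    by_hash: HashMap<u64, ModuleInfo>,
}

impl InfoCache {
    fn analyze(&mut self, source: &str) -> ModuleInfo {
        let mut h = DefaultHasher::new();
        source.hash(&mut h);
        let key = h.finish();
        if let Some(info) = self.by_hash.get(&key) {
            return *info; // cache hit: skip parsing entirely
        }
        // cache miss: "parse" the module, then remember the result
        let info = source.matches("import ").count();
        self.by_hash.insert(key, info);
        info
    }
}

fn main() {
    let mut cache = InfoCache { by_hash: HashMap::new() };
    let src = "import \"./a.ts\";\nimport \"./b.ts\";\n";
    assert_eq!(cache.analyze(src), 2); // parsed
    assert_eq!(cache.analyze(src), 2); // served from the cache
}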
@@ -63,6 +77,7 @@ impl ModuleInfoCache { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { Self { conn: self.conn.recreate_with_version(version), + parsed_source_cache: self.parsed_source_cache, } } @@ -113,13 +128,10 @@ impl ModuleInfoCache { Ok(()) } - pub fn as_module_analyzer<'a>( - &'a self, - parsed_source_cache: &'a Arc, - ) -> ModuleInfoCacheModuleAnalyzer<'a> { + pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer { module_info_cache: self, - parsed_source_cache, + parsed_source_cache: &self.parsed_source_cache, } } } @@ -129,6 +141,84 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> { parsed_source_cache: &'a Arc, } +impl<'a> ModuleInfoCacheModuleAnalyzer<'a> { + fn load_cached_module_info( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source_hash: CacheDBHash, + ) -> Option { + match self.module_info_cache.get_module_info( + specifier, + media_type, + source_hash, + ) { + Ok(Some(info)) => Some(info), + Ok(None) => None, + Err(err) => { + log::debug!( + "Error loading module cache info for {}. {:#}", + specifier, + err + ); + None + } + } + } + + fn save_module_info_to_cache( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source_hash: CacheDBHash, + module_info: &ModuleInfo, + ) { + if let Err(err) = self.module_info_cache.set_module_info( + specifier, + media_type, + source_hash, + module_info, + ) { + log::debug!( + "Error saving module cache info for {}. {:#}", + specifier, + err + ); + } + } + + pub fn analyze_sync( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source: &Arc, + ) -> Result { + // attempt to load from the cache + let source_hash = CacheDBHash::from_source(source); + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); + } + + // otherwise, get the module info from the parsed source cache + let parser = self.parsed_source_cache.as_capturing_parser(); + let analyzer = ParserModuleAnalyzer::new(&parser); + let module_info = + analyzer.analyze_sync(specifier, source.clone(), media_type)?; + + // then attempt to cache it + self.save_module_info_to_cache( + specifier, + media_type, + source_hash, + &module_info, + ); + + Ok(module_info) + } +} + #[async_trait::async_trait(?Send)] impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { async fn analyze( @@ -139,20 +229,10 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { ) -> Result { // attempt to load from the cache let source_hash = CacheDBHash::from_source(&source); - match self.module_info_cache.get_module_info( - specifier, - media_type, - source_hash, - ) { - Ok(Some(info)) => return Ok(info), - Ok(None) => {} - Err(err) => { - log::debug!( - "Error loading module cache info for {}. {:#}", - specifier, - err - ); - } + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); } // otherwise, get the module info from the parsed source cache @@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { .unwrap()?; // then attempt to cache it - if let Err(err) = self.module_info_cache.set_module_info( + self.save_module_info_to_cache( specifier, media_type, source_hash, &module_info, - ) { - log::debug!( - "Error saving module cache info for {}. 
{:#}", - specifier, - err - ); - } + ); Ok(module_info) } @@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 { Tsx => 11, Json => 12, Wasm => 13, - TsBuildInfo => 14, + Css => 14, SourceMap => 15, Unknown => 16, } @@ -217,7 +291,7 @@ mod test { #[test] pub fn module_info_cache_general_use() { - let cache = ModuleInfoCache::new_in_memory("1.0.0"); + let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default()); let specifier1 = ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); let specifier2 = diff --git a/cli/cache/parsed_source.rs b/cli/cache/parsed_source.rs index df6e45c35e..7e819ae998 100644 --- a/cli/cache/parsed_source.rs +++ b/cli/cache/parsed_source.rs @@ -5,12 +5,11 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; -use deno_ast::ParseDiagnostic; use deno_ast::ParsedSource; use deno_core::parking_lot::Mutex; -use deno_graph::CapturingModuleParser; -use deno_graph::DefaultModuleParser; -use deno_graph::ModuleParser; +use deno_graph::CapturingEsParser; +use deno_graph::DefaultEsParser; +use deno_graph::EsParser; use deno_graph::ParseOptions; use deno_graph::ParsedSourceStore; @@ -47,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> { } } -#[derive(Default)] +#[derive(Debug, Default)] pub struct ParsedSourceCache { sources: Mutex>, } @@ -58,12 +57,11 @@ impl ParsedSourceCache { module: &deno_graph::JsModule, ) -> Result { let parser = self.as_capturing_parser(); - // this will conditionally parse because it's using a CapturingModuleParser - parser.parse_module(ParseOptions { + // this will conditionally parse because it's using a CapturingEsParser + parser.parse_program(ParseOptions { specifier: &module.specifier, source: module.source.clone(), media_type: module.media_type, - // don't bother enabling because this method is currently only used for vendoring scope_analysis: false, }) } @@ -87,10 +85,9 @@ impl ParsedSourceCache { specifier, source, media_type, - // don't bother enabling because this method is currently only used for emitting scope_analysis: false, }; - DefaultModuleParser.parse_module(options) + DefaultEsParser.parse_program(options) } /// Frees the parsed source from memory. @@ -100,8 +97,8 @@ impl ParsedSourceCache { /// Creates a parser that will reuse a ParsedSource from the store /// if it exists, or else parse. 
- pub fn as_capturing_parser(&self) -> CapturingModuleParser { - CapturingModuleParser::new(None, self) + pub fn as_capturing_parser(&self) -> CapturingEsParser { + CapturingEsParser::new(None, self) } } @@ -150,42 +147,3 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache { } } } - -pub struct EsmOrCjsChecker { - parsed_source_cache: Arc, -} - -impl EsmOrCjsChecker { - pub fn new(parsed_source_cache: Arc) -> Self { - Self { - parsed_source_cache, - } - } - - pub fn is_esm( - &self, - specifier: &ModuleSpecifier, - source: Arc, - media_type: MediaType, - ) -> Result { - // todo(dsherret): add a file cache here to avoid parsing with swc on each run - let source = match self.parsed_source_cache.get_parsed_source(specifier) { - Some(source) => source.clone(), - None => { - let source = deno_ast::parse_program(deno_ast::ParseParams { - specifier: specifier.clone(), - text: source, - media_type, - capture_tokens: true, // capture because it's used for cjs export analysis - scope_analysis: false, - maybe_syntax: None, - })?; - self - .parsed_source_cache - .set_parsed_source(specifier.clone(), source.clone()); - source - } - }; - Ok(source.is_module()) - } -} diff --git a/cli/emit.rs b/cli/emit.rs index 8e93092e67..8c4f2091cf 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -3,11 +3,14 @@ use crate::cache::EmitCache; use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; +use crate::resolver::CjsTracker; +use deno_ast::ModuleKind; use deno_ast::SourceMapOption; use deno_ast::SourceRange; use deno_ast::SourceRanged; use deno_ast::SourceRangedForSpanned; +use deno_ast::TranspileModuleOptions; use deno_ast::TranspileResult; use deno_core::error::AnyError; use deno_core::futures::stream::FuturesUnordered; @@ -19,7 +22,9 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use std::sync::Arc; +#[derive(Debug)] pub struct Emitter { + cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, transpile_and_emit_options: @@ -30,6 +35,7 @@ pub struct Emitter { impl Emitter { pub fn new( + cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, transpile_options: deno_ast::TranspileOptions, @@ -42,6 +48,7 @@ impl Emitter { hasher.finish() }; Self { + cjs_tracker, emit_cache, parsed_source_cache, transpile_and_emit_options: Arc::new((transpile_options, emit_options)), @@ -59,21 +66,19 @@ impl Emitter { continue; }; - // todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable() - let is_emittable = matches!( - module.media_type, - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Jsx - | MediaType::Tsx - ); - if is_emittable { + if module.media_type.is_emittable() { futures.push( self .emit_parsed_source( &module.specifier, module.media_type, + ModuleKind::from_is_cjs( + self.cjs_tracker.is_cjs_with_known_is_script( + &module.specifier, + module.media_type, + module.is_script, + )?, + ), &module.source, ) .boxed_local(), @@ -92,9 +97,10 @@ impl Emitter { pub fn maybe_cached_emit( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &str, ) -> Option { - let source_hash = self.get_source_hash(source); + let source_hash = self.get_source_hash(module_kind, source); self.emit_cache.get_emit_code(specifier, source_hash) } @@ -102,11 +108,12 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc, ) -> Result { // Note: keep this in sync with the sync version below let helper = EmitParsedSourceHelper(self); - match 
helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let parsed_source_cache = self.parsed_source_cache.clone(); @@ -119,8 +126,9 @@ impl Emitter { EmitParsedSourceHelper::transpile( &parsed_source_cache, &specifier, - source.clone(), media_type, + module_kind, + source.clone(), &transpile_and_emit_options.0, &transpile_and_emit_options.1, ) @@ -142,18 +150,20 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc, ) -> Result { // Note: keep this in sync with the async version above let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let transpiled_source = EmitParsedSourceHelper::transpile( &self.parsed_source_cache, specifier, - source.clone(), media_type, + module_kind, + source.clone(), &self.transpile_and_emit_options.0, &self.transpile_and_emit_options.1, )?; @@ -171,6 +181,7 @@ impl Emitter { pub async fn load_and_emit_for_hmr( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, ) -> Result { let media_type = MediaType::from_specifier(specifier); let source_code = tokio::fs::read_to_string( @@ -193,9 +204,14 @@ impl Emitter { let mut options = self.transpile_and_emit_options.1.clone(); options.source_map = SourceMapOption::None; let transpiled_source = parsed_source - .transpile(&self.transpile_and_emit_options.0, &options)? - .into_source() - .into_string()?; + .transpile( + &self.transpile_and_emit_options.0, + &deno_ast::TranspileModuleOptions { + module_kind: Some(module_kind), + }, + &options, + )? + .into_source(); Ok(transpiled_source.text) } MediaType::JavaScript @@ -206,7 +222,7 @@ impl Emitter { | MediaType::Dcts | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => { // clear this specifier from the parsed source cache as it's now out of date @@ -219,10 +235,11 @@ impl Emitter { /// A hashing function that takes the source code and uses the global emit /// options then generates a string hash which can be stored to /// determine if the cached emit is valid or not. 
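Aside (editor's sketch): the hash below now also folds in the module kind, because the same source transpiles differently when treated as ESM versus CJS, so a stale emit must not be served after the detected kind changes. A minimal illustration with invented names, mirroring the `write_hashable(module_kind)` call added in this diff:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Hash, Clone, Copy)]
enum ModuleKind {
    Esm,
    Cjs,
}

// Cache key for an emitted file: both the source text and the interpretation
// of the module contribute, since either changing invalidates the emit.
fn emit_cache_key(source: &str, kind: ModuleKind) -> u64 {
    let mut h = DefaultHasher::new();
    source.hash(&mut h);
    kind.hash(&mut h);
    h.finish()
}

fn main() {
    // Same source, different interpretation: the keys must differ.
    let src = "import './dep.ts'; export const x = 1;";
    assert_ne!(
        emit_cache_key(src, ModuleKind::Esm),
        emit_cache_key(src, ModuleKind::Cjs)
    );
}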
- fn get_source_hash(&self, source_text: &str) -> u64 { + fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 { FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash .write_str(source_text) .write_u64(self.transpile_and_emit_options_hash) + .write_hashable(module_kind) .finish() } } @@ -239,9 +256,10 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn pre_emit_parsed_source( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &Arc, ) -> PreEmitResult { - let source_hash = self.0.get_source_hash(source); + let source_hash = self.0.get_source_hash(module_kind, source); if let Some(emit_code) = self.0.emit_cache.get_emit_code(specifier, source_hash) @@ -255,8 +273,9 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn transpile( parsed_source_cache: &ParsedSourceCache, specifier: &ModuleSpecifier, - source: Arc, media_type: MediaType, + module_kind: deno_ast::ModuleKind, + source: Arc, transpile_options: &deno_ast::TranspileOptions, emit_options: &deno_ast::EmitOptions, ) -> Result { @@ -265,8 +284,13 @@ impl<'a> EmitParsedSourceHelper<'a> { let parsed_source = parsed_source_cache .remove_or_parse_module(specifier, source, media_type)?; ensure_no_import_assertion(&parsed_source)?; - let transpile_result = - parsed_source.transpile(transpile_options, emit_options)?; + let transpile_result = parsed_source.transpile( + transpile_options, + &TranspileModuleOptions { + module_kind: Some(module_kind), + }, + emit_options, + )?; let transpiled_source = match transpile_result { TranspileResult::Owned(source) => source, TranspileResult::Cloned(source) => { @@ -275,8 +299,7 @@ impl<'a> EmitParsedSourceHelper<'a> { } }; debug_assert!(transpiled_source.source_map.is_none()); - let text = String::from_utf8(transpiled_source.source)?; - Ok(text) + Ok(transpiled_source.text) } pub fn post_emit_parsed_source( @@ -321,7 +344,7 @@ fn ensure_no_import_assertion( deno_core::anyhow::anyhow!("{}", msg) } - let Some(module) = parsed_source.program_ref().as_module() else { + let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else { return Ok(()); }; diff --git a/cli/factory.rs b/cli/factory.rs index d5ef4fd8b3..4a36c75ba2 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -11,10 +11,10 @@ use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; use crate::cache::CodeCache; +use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; use crate::cache::EmitCache; -use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; use crate::cache::LocalHttpCache; @@ -33,12 +33,16 @@ use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; +use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliGraphResolver; use 
crate::resolver::CliGraphResolverOptions; @@ -51,6 +55,7 @@ use crate::tools::check::TypeChecker; use crate::tools::coverage::CoverageCollector; use crate::tools::lint::LintRuleProvider; use crate::tools::run::hmr::HmrRunner; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; @@ -59,6 +64,7 @@ use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; use std::path::PathBuf; +use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; use deno_core::error::AnyError; @@ -68,6 +74,7 @@ use deno_core::FeatureChecker; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -77,6 +84,7 @@ use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::RuntimePermissionDescriptorParser; use log::warn; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::InNpmPackageChecker; use once_cell::sync::OnceCell; use std::future::Future; use std::sync::Arc; @@ -164,39 +172,41 @@ impl Deferred { #[derive(Default)] struct CliFactoryServices { - cli_options: Deferred>, + blob_store: Deferred>, caches: Deferred>, + cjs_tracker: Deferred>, + cli_node_resolver: Deferred>, + cli_options: Deferred>, + code_cache: Deferred>, + emit_cache: Deferred>, + emitter: Deferred>, + feature_checker: Deferred>, file_fetcher: Deferred>, + fs: Deferred>, global_http_cache: Deferred>, http_cache: Deferred>, http_client_provider: Deferred>, - emit_cache: Deferred>, - emitter: Deferred>, - esm_or_cjs_checker: Deferred>, - fs: Deferred>, + in_npm_pkg_checker: Deferred>, main_graph_container: Deferred>, - maybe_inspector_server: Deferred>>, - root_cert_store_provider: Deferred>, - blob_store: Deferred>, - module_info_cache: Deferred>, - parsed_source_cache: Deferred>, - resolver: Deferred>, maybe_file_watcher_reporter: Deferred>, + maybe_inspector_server: Deferred>>, module_graph_builder: Deferred>, module_graph_creator: Deferred>, + module_info_cache: Deferred>, module_load_preparer: Deferred>, node_code_translator: Deferred>, node_resolver: Deferred>, + npm_cache_dir: Deferred>, npm_resolver: Deferred>, + parsed_source_cache: Deferred>, permission_desc_parser: Deferred>, + pkg_json_resolver: Deferred>, + resolver: Deferred>, + root_cert_store_provider: Deferred>, root_permissions_container: Deferred, sloppy_imports_resolver: Deferred>>, text_only_progress_bar: Deferred, type_checker: Deferred>, - cjs_resolutions: Deferred>, - cli_node_resolver: Deferred>, - feature_checker: Deferred>, - code_cache: Deferred>, workspace_resolver: Deferred>, } @@ -300,12 +310,6 @@ impl CliFactory { .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) } - pub fn esm_or_cjs_checker(&self) -> &Arc { - self.services.esm_or_cjs_checker.get_or_init(|| { - Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone())) - }) - } - pub fn global_http_cache(&self) -> Result<&Arc, AnyError> { self.services.global_http_cache.get_or_try_init(|| { Ok(Arc::new(GlobalHttpCache::new( @@ -359,56 +363,112 @@ impl CliFactory { self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) } + pub fn 
in_npm_pkg_checker( + &self, + ) -> Result<&Arc, AnyError> { + self.services.in_npm_pkg_checker.get_or_try_init(|| { + let cli_options = self.cli_options()?; + let options = if cli_options.use_byonm() { + CreateInNpmPkgCheckerOptions::Byonm + } else { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .map(|p| p.as_path()), + }, + ) + }; + Ok(create_in_npm_pkg_checker(options)) + }) + } + + pub fn npm_cache_dir(&self) -> Result<&Arc, AnyError> { + self.services.npm_cache_dir.get_or_try_init(|| { + let fs = self.fs(); + let global_path = self.deno_dir()?.npm_folder_path(); + let cli_options = self.cli_options()?; + Ok(Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + global_path, + cli_options.npmrc().get_all_known_registries_urls(), + ))) + }) + } + pub async fn npm_resolver( &self, ) -> Result<&Arc, AnyError> { self .services .npm_resolver - .get_or_try_init_async(async { - let fs = self.fs(); - let cli_options = self.cli_options()?; - // For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. - create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { - CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(fs.clone()), - root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { - Some(node_modules_path) => node_modules_path.to_path_buf(), - // path needs to be canonicalized for node resolution - // (node_modules_dir_path above is already canonicalized) - None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? - .join("node_modules"), - }), - }) - } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { - snapshot: match cli_options.resolve_npm_resolution_snapshot()? { - Some(snapshot) => { - CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) - } - None => match cli_options.maybe_lockfile() { - Some(lockfile) => { - CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( - lockfile.clone(), - ) - } - None => CliNpmResolverManagedSnapshotOption::Specified(None), + .get_or_try_init_async( + async { + let fs = self.fs(); + let cli_options = self.cli_options()?; + create_cli_npm_resolver(if cli_options.use_byonm() { + CliNpmResolverCreateOptions::Byonm( + CliByonmNpmResolverCreateOptions { + fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: self.pkg_json_resolver().clone(), + root_node_modules_dir: Some( + match cli_options.node_modules_dir_path() { + Some(node_modules_path) => node_modules_path.to_path_buf(), + // path needs to be canonicalized for node resolution + // (node_modules_dir_path above is already canonicalized) + None => canonicalize_path_maybe_not_exists( + cli_options.initial_cwd(), + )? 
+ .join("node_modules"), + }, + ), }, - }, - maybe_lockfile: cli_options.maybe_lockfile().cloned(), - fs: fs.clone(), - http_client_provider: self.http_client_provider().clone(), - npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), - cache_setting: cli_options.cache_setting(), - text_only_progress_bar: self.text_only_progress_bar().clone(), - maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())), - npm_system_info: cli_options.npm_system_info(), - npmrc: cli_options.npmrc().clone(), - lifecycle_scripts: cli_options.lifecycle_scripts_config(), + ) + } else { + CliNpmResolverCreateOptions::Managed( + CliManagedNpmResolverCreateOptions { + snapshot: match cli_options.resolve_npm_resolution_snapshot()? { + Some(snapshot) => { + CliNpmResolverManagedSnapshotOption::Specified(Some( + snapshot, + )) + } + None => match cli_options.maybe_lockfile() { + Some(lockfile) => { + CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( + lockfile.clone(), + ) + } + None => { + CliNpmResolverManagedSnapshotOption::Specified(None) + } + }, + }, + maybe_lockfile: cli_options.maybe_lockfile().cloned(), + fs: fs.clone(), + http_client_provider: self.http_client_provider().clone(), + npm_cache_dir: self.npm_cache_dir()?.clone(), + cache_setting: cli_options.cache_setting(), + text_only_progress_bar: self.text_only_progress_bar().clone(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .cloned(), + npm_install_deps_provider: Arc::new( + NpmInstallDepsProvider::from_workspace( + cli_options.workspace(), + ), + ), + npm_system_info: cli_options.npm_system_info(), + npmrc: cli_options.npmrc().clone(), + lifecycle_scripts: cli_options.lifecycle_scripts_config(), + }, + ) }) - }).await - }.boxed_local()) + .await + } + .boxed_local(), + ) .await } @@ -513,6 +573,7 @@ impl CliFactory { self.services.module_info_cache.get_or_try_init(|| { Ok(Arc::new(ModuleInfoCache::new( self.caches()?.dep_analysis_db(), + self.parsed_source_cache().clone(), ))) }) } @@ -541,6 +602,7 @@ impl CliFactory { ts_config_result.ts_config, )?; Ok(Arc::new(Emitter::new( + self.cjs_tracker()?.clone(), self.emit_cache()?.clone(), self.parsed_source_cache().clone(), transpile_options, @@ -564,7 +626,9 @@ impl CliFactory { async { Ok(Arc::new(NodeResolver::new( DenoFsNodeResolverEnv::new(self.fs().clone()), + self.in_npm_pkg_checker()?.clone(), self.npm_resolver().await?.clone().into_npm_resolver(), + self.pkg_json_resolver().clone(), ))) } .boxed_local(), @@ -582,24 +646,35 @@ impl CliFactory { let caches = self.caches()?; let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db()); - let node_resolver = self.cli_node_resolver().await?.clone(); + let node_resolver = self.node_resolver().await?.clone(); let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + self.cjs_tracker()?.clone(), self.fs().clone(), - node_resolver, Some(self.parsed_source_cache().clone()), + self.cli_options()?.is_npm_main(), ); Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, DenoFsNodeResolverEnv::new(self.fs().clone()), - self.node_resolver().await?.clone(), + self.in_npm_pkg_checker()?.clone(), + node_resolver, self.npm_resolver().await?.clone().into_npm_resolver(), + self.pkg_json_resolver().clone(), ))) }) .await } + pub fn pkg_json_resolver(&self) -> &Arc { + self.services.pkg_json_resolver.get_or_init(|| { + Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new( + 
self.fs().clone(), + ))) + }) + } + pub async fn type_checker(&self) -> Result<&Arc, AnyError> { self .services @@ -608,6 +683,10 @@ impl CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(TypeChecker::new( self.caches()?.clone(), + Arc::new(TypeCheckingCjsTracker::new( + self.cjs_tracker()?.clone(), + self.module_info_cache()?.clone(), + )), cli_options.clone(), self.module_graph_builder().await?.clone(), self.node_resolver().await?.clone(), @@ -626,19 +705,18 @@ impl CliFactory { .get_or_try_init_async(async { let cli_options = self.cli_options()?; Ok(Arc::new(ModuleGraphBuilder::new( - cli_options.clone(), self.caches()?.clone(), - self.esm_or_cjs_checker().clone(), + cli_options.clone(), + self.file_fetcher()?.clone(), self.fs().clone(), - self.resolver().await?.clone(), - self.cli_node_resolver().await?.clone(), - self.npm_resolver().await?.clone(), - self.module_info_cache()?.clone(), - self.parsed_source_cache().clone(), + self.global_http_cache()?.clone(), + self.in_npm_pkg_checker()?.clone(), cli_options.maybe_lockfile().cloned(), self.maybe_file_watcher_reporter().clone(), - self.file_fetcher()?.clone(), - self.global_http_cache()?.clone(), + self.module_info_cache()?.clone(), + self.npm_resolver().await?.clone(), + self.parsed_source_cache().clone(), + self.resolver().await?.clone(), self.root_permissions_container()?.clone(), ))) }) @@ -710,8 +788,17 @@ impl CliFactory { .await } - pub fn cjs_resolutions(&self) -> &Arc { - self.services.cjs_resolutions.get_or_init(Default::default) + pub fn cjs_tracker(&self) -> Result<&Arc, AnyError> { + self.services.cjs_tracker.get_or_try_init(|| { + let options = self.cli_options()?; + Ok(Arc::new(CjsTracker::new( + self.in_npm_pkg_checker()?.clone(), + self.pkg_json_resolver().clone(), + CjsTrackerOptions { + unstable_detect_cjs: options.unstable_detect_cjs(), + }, + ))) + }) } pub async fn cli_node_resolver( @@ -722,8 +809,9 @@ impl CliFactory { .cli_node_resolver .get_or_try_init_async(async { Ok(Arc::new(CliNodeResolver::new( - self.cjs_resolutions().clone(), + self.cjs_tracker()?.clone(), self.fs().clone(), + self.in_npm_pkg_checker()?.clone(), self.node_resolver().await?.clone(), self.npm_resolver().await?.clone(), ))) @@ -761,6 +849,7 @@ impl CliFactory { ) -> Result { let cli_options = self.cli_options()?; Ok(DenoCompileBinaryWriter::new( + self.cjs_tracker()?, self.deno_dir()?, self.emitter()?, self.file_fetcher()?, @@ -791,53 +880,60 @@ impl CliFactory { &self, ) -> Result { let cli_options = self.cli_options()?; + let fs = self.fs(); let node_resolver = self.node_resolver().await?; let npm_resolver = self.npm_resolver().await?; - let fs = self.fs(); let cli_node_resolver = self.cli_node_resolver().await?; let cli_npm_resolver = self.npm_resolver().await?.clone(); + let in_npm_pkg_checker = self.in_npm_pkg_checker()?; let maybe_file_watcher_communicator = if cli_options.has_hmr() { Some(self.watcher_communicator.clone().unwrap()) } else { None }; + let node_code_translator = self.node_code_translator().await?; + let cjs_tracker = self.cjs_tracker()?.clone(); + let pkg_json_resolver = self.pkg_json_resolver().clone(); Ok(CliMainWorkerFactory::new( self.blob_store().clone(), - self.cjs_resolutions().clone(), if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { None }, self.feature_checker()?.clone(), - self.fs().clone(), + fs.clone(), maybe_file_watcher_communicator, self.maybe_inspector_server()?.clone(), cli_options.maybe_lockfile().cloned(), Box::new(CliModuleLoaderFactory::new( 
cli_options, + cjs_tracker, if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { None }, self.emitter()?.clone(), + fs.clone(), + in_npm_pkg_checker.clone(), self.main_module_graph_container().await?.clone(), self.module_load_preparer().await?.clone(), + node_code_translator.clone(), cli_node_resolver.clone(), cli_npm_resolver.clone(), NpmModuleLoader::new( - self.cjs_resolutions().clone(), - self.node_code_translator().await?.clone(), + self.cjs_tracker()?.clone(), fs.clone(), - cli_node_resolver.clone(), + node_code_translator.clone(), ), self.parsed_source_cache().clone(), self.resolver().await?.clone(), )), node_resolver.clone(), npm_resolver.clone(), + pkg_json_resolver, self.root_cert_store_provider().clone(), self.root_permissions_container()?.clone(), StorageKeyResolver::from_options(cli_options), @@ -853,8 +949,10 @@ impl CliFactory { let create_hmr_runner = if cli_options.has_hmr() { let watcher_communicator = self.watcher_communicator.clone().unwrap(); let emitter = self.emitter()?.clone(); + let cjs_tracker = self.cjs_tracker()?.clone(); let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| { Box::new(HmrRunner::new( + cjs_tracker.clone(), emitter.clone(), session, watcher_communicator.clone(), @@ -891,7 +989,6 @@ impl CliFactory { inspect_wait: cli_options.inspect_wait().is_some(), strace_ops: cli_options.strace_ops().clone(), is_inspecting: cli_options.is_inspecting(), - is_npm_main: cli_options.is_npm_main(), location: cli_options.location_flag().clone(), // if the user ran a binary command, we'll need to set process.argv[0] // to be the name of the binary command instead of deno @@ -909,7 +1006,6 @@ impl CliFactory { node_ipc: cli_options.node_ipc_fd(), serve_port: cli_options.serve_port(), serve_host: cli_options.serve_host(), - unstable_detect_cjs: cli_options.unstable_detect_cjs(), }) } } diff --git a/cli/graph_util.rs b/cli/graph_util.rs index 2eaee228af..46257cf785 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -6,7 +6,6 @@ use crate::args::CliLockfile; use crate::args::CliOptions; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::cache; -use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; @@ -15,7 +14,6 @@ use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; use crate::npm::CliNpmResolver; use crate::resolver::CliGraphResolver; -use crate::resolver::CliNodeResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; @@ -50,6 +48,7 @@ use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::jsr::JsrDepPackageReq; use deno_semver::package::PackageNv; use import_map::ImportMapError; +use node_resolver::InNpmPackageChecker; use std::collections::HashSet; use std::error::Error; use std::ops::Deref; @@ -379,54 +378,51 @@ pub struct BuildFastCheckGraphOptions<'a> { } pub struct ModuleGraphBuilder { - options: Arc, caches: Arc, - esm_or_cjs_checker: Arc, + cli_options: Arc, + file_fetcher: Arc, fs: Arc, - resolver: Arc, - node_resolver: Arc, - npm_resolver: Arc, - module_info_cache: Arc, - parsed_source_cache: Arc, + global_http_cache: Arc, + in_npm_pkg_checker: Arc, lockfile: Option>, maybe_file_watcher_reporter: Option, - file_fetcher: Arc, - global_http_cache: Arc, + module_info_cache: Arc, + npm_resolver: Arc, + parsed_source_cache: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, } impl 
ModuleGraphBuilder { #[allow(clippy::too_many_arguments)] pub fn new( - options: Arc, caches: Arc, - esm_or_cjs_checker: Arc, + cli_options: Arc, + file_fetcher: Arc, fs: Arc, - resolver: Arc, - node_resolver: Arc, - npm_resolver: Arc, - module_info_cache: Arc, - parsed_source_cache: Arc, + global_http_cache: Arc, + in_npm_pkg_checker: Arc, lockfile: Option>, maybe_file_watcher_reporter: Option, - file_fetcher: Arc, - global_http_cache: Arc, + module_info_cache: Arc, + npm_resolver: Arc, + parsed_source_cache: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, ) -> Self { Self { - options, caches, - esm_or_cjs_checker, + cli_options, + file_fetcher, fs, - resolver, - node_resolver, - npm_resolver, - module_info_cache, - parsed_source_cache, + global_http_cache, + in_npm_pkg_checker, lockfile, maybe_file_watcher_reporter, - file_fetcher, - global_http_cache, + module_info_cache, + npm_resolver, + parsed_source_cache, + resolver, root_permissions_container, } } @@ -512,13 +508,11 @@ impl ModuleGraphBuilder { } let maybe_imports = if options.graph_kind.include_types() { - self.options.to_compiler_option_types()? + self.cli_options.to_compiler_option_types()? } else { Vec::new() }; - let analyzer = self - .module_info_cache - .as_module_analyzer(&self.parsed_source_cache); + let analyzer = self.module_info_cache.as_module_analyzer(); let mut loader = match options.loader { Some(loader) => MutLoaderRef::Borrowed(loader), None => MutLoaderRef::Owned(self.create_graph_loader()), @@ -566,7 +560,7 @@ impl ModuleGraphBuilder { // ensure an "npm install" is done if the user has explicitly // opted into using a node_modules directory if self - .options + .cli_options .node_modules_dir()? .map(|m| m.uses_node_modules_dir()) .unwrap_or(false) @@ -677,10 +671,10 @@ impl ModuleGraphBuilder { graph.build_fast_check_type_graph( deno_graph::BuildFastCheckTypeGraphOptions { - jsr_url_provider: &CliJsrUrlProvider, + es_parser: Some(&parser), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_dts: false, - module_parser: Some(&parser), + jsr_url_provider: &CliJsrUrlProvider, resolver: Some(graph_resolver), npm_resolver: Some(&graph_npm_resolver), workspace_fast_check: options.workspace_fast_check, @@ -699,20 +693,18 @@ impl ModuleGraphBuilder { permissions: PermissionsContainer, ) -> cache::FetchCacher { cache::FetchCacher::new( - self.esm_or_cjs_checker.clone(), self.file_fetcher.clone(), + self.fs.clone(), self.global_http_cache.clone(), - self.node_resolver.clone(), - self.npm_resolver.clone(), + self.in_npm_pkg_checker.clone(), self.module_info_cache.clone(), cache::FetchCacherOptions { - file_header_overrides: self.options.resolve_file_header_overrides(), + file_header_overrides: self.cli_options.resolve_file_header_overrides(), permissions, is_deno_publish: matches!( - self.options.sub_command(), + self.cli_options.sub_command(), crate::args::DenoSubcommand::Publish { .. 
} ), - unstable_detect_cjs: self.options.unstable_detect_cjs(), }, ) } @@ -737,12 +729,12 @@ impl ModuleGraphBuilder { &self.fs, roots, GraphValidOptions { - kind: if self.options.type_check_mode().is_true() { + kind: if self.cli_options.type_check_mode().is_true() { GraphKind::All } else { GraphKind::CodeOnly }, - check_js: self.options.check_js(), + check_js: self.cli_options.check_js(), exit_integrity_errors: true, }, ) diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 39f1ae6482..0769c90477 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -36,7 +36,6 @@ use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; use deno_semver::Version; use import_map::ImportMap; -use node_resolver::NpmResolver; use once_cell::sync::Lazy; use regex::Regex; use std::borrow::Cow; @@ -336,7 +335,12 @@ impl<'a> TsResponseImportMapper<'a> { .resolver .maybe_managed_npm_resolver(Some(&self.file_referrer)) { - if npm_resolver.in_npm_package(specifier) { + let in_npm_pkg = self + .resolver + .maybe_node_resolver(Some(&self.file_referrer)) + .map(|n| n.in_npm_package(specifier)) + .unwrap_or(false); + if in_npm_pkg { if let Ok(Some(pkg_id)) = npm_resolver.resolve_pkg_id_from_specifier(specifier) { @@ -1199,14 +1203,11 @@ impl CodeActionCollection { }), ); - match parsed_source.program_ref() { - deno_ast::swc::ast::Program::Module(module) => module - .body - .iter() - .find(|i| i.range().contains(&specifier_range)) - .map(|i| text_info.line_and_column_index(i.range().start)), - deno_ast::swc::ast::Program::Script(_) => None, - } + parsed_source + .program_ref() + .body() + .find(|i| i.range().contains(&specifier_range)) + .map(|i| text_info.line_and_column_index(i.range().start)) } async fn deno_types_for_npm_action( diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index e117888fba..a57ca3ac9f 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -421,7 +421,7 @@ pub fn collect_test( ) -> Result, AnyError> { let mut collector = DenoTestCollector::new(specifier.clone(), parsed_source.clone()); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Ok(collector.take()) } @@ -581,7 +581,7 @@ mod tests { .unwrap(); let mut collector = DenoTestCollector::new(specifier, parsed_module.clone()); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); assert_eq!( collector.take(), vec![ diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index cac0637a05..34bf64446d 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -985,7 +985,7 @@ impl Config { | MediaType::Tsx => Some(&workspace_settings.typescript), MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => None, } diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 7d1ca6810d..8609aed05e 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -272,7 +272,7 @@ fn get_maybe_test_module_fut( parsed_source.specifier().clone(), parsed_source.text_info_lazy().clone(), ); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Arc::new(collector.take()) }) .map(Result::ok) @@ -332,12 +332,8 @@ impl Document { .filter(|s| cache.is_valid_file_referrer(s)) .cloned() .or(file_referrer); - let media_type = resolve_media_type( - &specifier, - maybe_headers.as_ref(), - maybe_language_id, - &resolver, - ); + let media_type = + resolve_media_type(&specifier, 
maybe_headers.as_ref(), maybe_language_id); let (maybe_parsed_source, maybe_module) = if media_type_is_diagnosable(media_type) { parse_and_analyze_module( @@ -399,7 +395,6 @@ impl Document { &self.specifier, self.maybe_headers.as_ref(), self.maybe_language_id, - &resolver, ); let dependencies; let maybe_types_dependency; @@ -764,14 +759,7 @@ fn resolve_media_type( specifier: &ModuleSpecifier, maybe_headers: Option<&HashMap>, maybe_language_id: Option, - resolver: &LspResolver, ) -> MediaType { - if resolver.in_node_modules(specifier) { - if let Some(media_type) = resolver.node_media_type(specifier) { - return media_type; - } - } - if let Some(language_id) = maybe_language_id { return MediaType::from_specifier_and_content_type( specifier, @@ -1561,7 +1549,7 @@ fn parse_source( text: Arc, media_type: MediaType, ) -> ParsedSourceResult { - deno_ast::parse_module(deno_ast::ParseParams { + deno_ast::parse_program(deno_ast::ParseParams { specifier, text, media_type, diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index a592245ce7..4fa0e3afbc 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -904,7 +904,7 @@ impl Inner { | MediaType::Tsx => {} MediaType::Wasm | MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Unknown => { if path.extension().and_then(|s| s.to_str()) != Some("jsonc") { continue; diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 00587f8f50..9ce76005e5 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,6 +2,8 @@ use dashmap::DashMap; use deno_ast::MediaType; +use deno_ast::ParsedSource; +use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; @@ -14,15 +16,15 @@ use deno_path_util::url_to_file_path; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; +use deno_runtime::deno_node::PackageJsonResolver; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; use node_resolver::errors::ClosestPkgJsonError; -use node_resolver::NodeResolution; +use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionMode; -use node_resolver::NpmResolver; use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; @@ -36,6 +38,7 @@ use crate::args::create_default_npmrc; use crate::args::CacheSetting; use crate::args::CliLockfile; use crate::args::NpmInstallDepsProvider; +use crate::cache::DenoCacheEnvFsAdapter; use crate::graph_util::CliJsrUrlProvider; use crate::http_util::HttpClientProvider; use crate::lsp::config::Config; @@ -43,26 +46,32 @@ use crate::lsp::config::ConfigData; use crate::lsp::logging::lsp_warn; use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; +use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; -use crate::resolver::CjsResolutionStore; +use crate::resolver::CjsTracker; +use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use 
crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliNodeResolver; use crate::resolver::WorkerCliNpmGraphResolver; +use crate::tsc::into_specifier_and_media_type; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; #[derive(Debug, Clone)] struct LspScopeResolver { + cjs_tracker: Option>, graph_resolver: Arc, jsr_resolver: Option>, npm_resolver: Option>, node_resolver: Option>, + pkg_json_resolver: Option>, redirect_resolver: Option>, graph_imports: Arc>, config_data: Option>, @@ -71,10 +80,12 @@ struct LspScopeResolver { impl Default for LspScopeResolver { fn default() -> Self { Self { + cjs_tracker: None, graph_resolver: create_graph_resolver(None, None, None), jsr_resolver: None, npm_resolver: None, node_resolver: None, + pkg_json_resolver: None, redirect_resolver: None, graph_imports: Default::default(), config_data: None, @@ -90,14 +101,35 @@ impl LspScopeResolver { ) -> Self { let mut npm_resolver = None; let mut node_resolver = None; + let mut lsp_cjs_tracker = None; + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); if let Some(http_client) = http_client_provider { npm_resolver = create_npm_resolver( config_data.map(|d| d.as_ref()), cache, http_client, + &pkg_json_resolver, ) .await; - node_resolver = create_node_resolver(npm_resolver.as_ref()); + if let Some(npm_resolver) = &npm_resolver { + let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); + let cjs_tracker = create_cjs_tracker( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + ); + lsp_cjs_tracker = + Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); + node_resolver = Some(create_node_resolver( + cjs_tracker, + fs.clone(), + in_npm_pkg_checker, + npm_resolver, + pkg_json_resolver.clone(), + )); + } } let graph_resolver = create_graph_resolver( config_data.map(|d| d.as_ref()), @@ -134,10 +166,12 @@ impl LspScopeResolver { }) .unwrap_or_default(); Self { + cjs_tracker: lsp_cjs_tracker, graph_resolver, jsr_resolver, npm_resolver, node_resolver, + pkg_json_resolver: Some(pkg_json_resolver), redirect_resolver, graph_imports, config_data: config_data.cloned(), @@ -147,18 +181,40 @@ impl LspScopeResolver { fn snapshot(&self) -> Arc { let npm_resolver = self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); - let node_resolver = create_node_resolver(npm_resolver.as_ref()); + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + let mut node_resolver = None; + let mut lsp_cjs_tracker = None; + if let Some(npm_resolver) = &npm_resolver { + let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); + let cjs_tracker = create_cjs_tracker( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + ); + lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); + node_resolver = Some(create_node_resolver( + cjs_tracker, + fs, + in_npm_pkg_checker, + npm_resolver, + pkg_json_resolver.clone(), + )); + } let graph_resolver = create_graph_resolver( self.config_data.as_deref(), npm_resolver.as_ref(), node_resolver.as_ref(), ); Arc::new(Self { + cjs_tracker: lsp_cjs_tracker, graph_resolver, jsr_resolver: self.jsr_resolver.clone(), npm_resolver, node_resolver, redirect_resolver: self.redirect_resolver.clone(), + pkg_json_resolver: Some(pkg_json_resolver), 
graph_imports: self.graph_imports.clone(), config_data: self.config_data.clone(), }) @@ -261,6 +317,22 @@ impl LspResolver { resolver.graph_resolver.create_graph_npm_resolver() } + pub fn maybe_cjs_tracker( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<&Arc> { + let resolver = self.get_scope_resolver(file_referrer); + resolver.cjs_tracker.as_ref() + } + + pub fn maybe_node_resolver( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<&Arc> { + let resolver = self.get_scope_resolver(file_referrer); + resolver.node_resolver.as_ref() + } + pub fn maybe_managed_npm_resolver( &self, file_referrer: Option<&ModuleSpecifier>, @@ -328,7 +400,7 @@ impl LspResolver { ) -> Option<(ModuleSpecifier, MediaType)> { let resolver = self.get_scope_resolver(file_referrer); let node_resolver = resolver.node_resolver.as_ref()?; - Some(NodeResolution::into_specifier_and_media_type(Some( + Some(into_specifier_and_media_type(Some( node_resolver .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .ok()?, @@ -346,14 +418,10 @@ impl LspResolver { .contains("/node_modules/") } - let global_npm_resolver = self - .get_scope_resolver(Some(specifier)) - .npm_resolver - .as_ref() - .and_then(|npm_resolver| npm_resolver.as_managed()) - .filter(|r| r.root_node_modules_path().is_none()); - if let Some(npm_resolver) = &global_npm_resolver { - if npm_resolver.in_npm_package(specifier) { + if let Some(node_resolver) = + &self.get_scope_resolver(Some(specifier)).node_resolver + { + if node_resolver.in_npm_package(specifier) { return true; } } @@ -361,18 +429,6 @@ impl LspResolver { has_node_modules_dir(specifier) } - pub fn node_media_type( - &self, - specifier: &ModuleSpecifier, - ) -> Option { - let resolver = self.get_scope_resolver(Some(specifier)); - let node_resolver = resolver.node_resolver.as_ref()?; - let resolution = node_resolver - .url_to_node_resolution(specifier.clone()) - .ok()?; - Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) - } - pub fn is_bare_package_json_dep( &self, specifier_text: &str, @@ -398,10 +454,10 @@ impl LspResolver { referrer: &ModuleSpecifier, ) -> Result>, ClosestPkgJsonError> { let resolver = self.get_scope_resolver(Some(referrer)); - let Some(node_resolver) = resolver.node_resolver.as_ref() else { + let Some(pkg_json_resolver) = resolver.pkg_json_resolver.as_ref() else { return Ok(None); }; - node_resolver.get_closest_package_json(referrer) + pkg_json_resolver.get_closest_package_json(referrer) } pub fn resolve_redirects( @@ -457,11 +513,13 @@ async fn create_npm_resolver( config_data: Option<&ConfigData>, cache: &LspCache, http_client_provider: &Arc, + pkg_json_resolver: &Arc, ) -> Option> { let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); let options = if enable_byonm { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), + pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir: config_data.and_then(|config_data| { config_data.node_modules_dir.clone().or_else(|| { url_to_file_path(&config_data.scope) @@ -471,7 +529,15 @@ async fn create_npm_resolver( }), }) } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { + let npmrc = config_data + .and_then(|d| d.npmrc.clone()) + .unwrap_or_else(create_default_npmrc); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(&deno_fs::RealFs), + cache.deno_dir().npm_folder_path(), + npmrc.get_all_known_registries_urls(), + )); + 
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions { http_client_provider: http_client_provider.clone(), snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { Some(lockfile) => { @@ -485,7 +551,7 @@ async fn create_npm_resolver( // updating it. Only the cache request should update the lockfile. maybe_lockfile: None, fs: Arc::new(deno_fs::RealFs), - npm_global_cache_dir: cache.deno_dir().npm_folder_path(), + npm_cache_dir, // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent // the cache from being filled with lots of packages while @@ -496,9 +562,7 @@ async fn create_npm_resolver( .and_then(|d| d.node_modules_dir.clone()), // only used for top level install, so we can ignore this npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), - npmrc: config_data - .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc), + npmrc, npm_system_info: NpmSystemInfo::default(), lifecycle_scripts: Default::default(), }) @@ -506,28 +570,59 @@ async fn create_npm_resolver( Some(create_cli_npm_resolver_for_lsp(options).await) } +fn create_cjs_tracker( + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, +) -> Arc { + Arc::new(CjsTracker::new( + in_npm_pkg_checker, + pkg_json_resolver, + CjsTrackerOptions { + // todo(dsherret): support in the lsp by stabilizing the feature + // so that we don't have to pipe the config in here + unstable_detect_cjs: false, + }, + )) +} + +fn create_in_npm_pkg_checker( + npm_resolver: &Arc, +) -> Arc { + crate::npm::create_in_npm_pkg_checker(match npm_resolver.as_inner() { + crate::npm::InnerCliNpmResolverRef::Byonm(_) => { + CreateInNpmPkgCheckerOptions::Byonm + } + crate::npm::InnerCliNpmResolverRef::Managed(m) => { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: m.global_cache_root_url(), + maybe_node_modules_path: m.maybe_node_modules_path(), + }, + ) + } + }) +} + fn create_node_resolver( - npm_resolver: Option<&Arc>, -) -> Option> { - use once_cell::sync::Lazy; - - // it's not ideal to share this across all scopes and to - // never clear it, but it's fine for the time being - static CJS_RESOLUTIONS: Lazy> = - Lazy::new(Default::default); - - let npm_resolver = npm_resolver?; - let fs = Arc::new(deno_fs::RealFs); + cjs_tracker: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, + npm_resolver: &Arc, + pkg_json_resolver: Arc, +) -> Arc { let node_resolver_inner = Arc::new(NodeResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), )); - Some(Arc::new(CliNodeResolver::new( - CJS_RESOLUTIONS.clone(), + Arc::new(CliNodeResolver::new( + cjs_tracker.clone(), fs, + in_npm_pkg_checker, node_resolver_inner, npm_resolver.clone(), - ))) + )) } fn create_graph_resolver( @@ -702,6 +797,45 @@ impl RedirectResolver { } } +#[derive(Debug)] +pub struct LspCjsTracker { + cjs_tracker: Arc, +} + +impl LspCjsTracker { + pub fn new(cjs_tracker: Arc) -> Self { + Self { cjs_tracker } + } + + pub fn is_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + maybe_parsed_source: Option<&ParsedSource>, + ) -> bool { + if let Some(module_kind) = + self.cjs_tracker.get_known_kind(specifier, media_type) + { + module_kind.is_cjs() + } else { + let maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script()); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + 
.is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/cli/lsp/testing/collectors.rs b/cli/lsp/testing/collectors.rs index 2f2ddb8773..2dd7ec0d96 100644 --- a/cli/lsp/testing/collectors.rs +++ b/cli/lsp/testing/collectors.rs @@ -650,7 +650,7 @@ pub mod tests { .unwrap(); let text_info = parsed_module.text_info_lazy().clone(); let mut collector = TestCollector::new(specifier, text_info); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); collector.take() } diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 0f31d7dd3b..5fcdb3575a 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -4362,14 +4362,25 @@ fn op_load<'s>( None } else { let asset_or_document = state.get_asset_or_document(&specifier); - asset_or_document.map(|doc| LoadResponse { - data: doc.text(), - script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), - version: state.script_version(&specifier), - is_cjs: matches!( - doc.media_type(), - MediaType::Cjs | MediaType::Cts | MediaType::Dcts - ), + asset_or_document.map(|doc| { + let maybe_cjs_tracker = state + .state_snapshot + .resolver + .maybe_cjs_tracker(Some(&specifier)); + LoadResponse { + data: doc.text(), + script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), + version: state.script_version(&specifier), + is_cjs: maybe_cjs_tracker + .map(|t| { + t.is_cjs( + &specifier, + doc.media_type(), + doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()), + ) + }) + .unwrap_or(false), + } }) }; diff --git a/cli/main.rs b/cli/main.rs index c2639c9088..04daff6700 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -135,7 +135,7 @@ async fn run_subcommand(flags: Arc) -> Result { tools::compile::compile(flags, compile_flags).await }), DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { - tools::coverage::cover_files(flags, coverage_flags).await + tools::coverage::cover_files(flags, coverage_flags) }), DenoSubcommand::Fmt(fmt_flags) => { spawn_subcommand( diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 4a020516e7..43c9e1aa07 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -2,6 +2,7 @@ use std::borrow::Cow; use std::cell::RefCell; +use std::path::Path; use std::path::PathBuf; use std::pin::Pin; use std::rc::Rc; @@ -23,19 +24,23 @@ use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_util::CreateGraphOptions; use crate::graph_util::ModuleGraphBuilder; use crate::node; +use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; +use crate::resolver::CjsTracker; use crate::resolver::CliGraphResolver; use crate::resolver::CliNodeResolver; use crate::resolver::ModuleCodeStringSource; +use crate::resolver::NotSupportedKindInNpmError; use crate::resolver::NpmModuleLoader; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; -use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -63,9 +68,12 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::Resolution; use 
deno_runtime::code_cache; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; +use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionMode; pub struct ModuleLoadPreparer { @@ -198,11 +206,16 @@ struct SharedCliModuleLoaderState { lib_worker: TsTypeLib, initial_cwd: PathBuf, is_inspecting: bool, + is_npm_main: bool, is_repl: bool, + cjs_tracker: Arc, code_cache: Option>, emitter: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, + node_code_translator: Arc, node_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, @@ -218,10 +231,14 @@ impl CliModuleLoaderFactory { #[allow(clippy::too_many_arguments)] pub fn new( options: &CliOptions, + cjs_tracker: Arc, code_cache: Option>, emitter: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, + node_code_translator: Arc, node_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, @@ -235,14 +252,19 @@ impl CliModuleLoaderFactory { lib_worker: options.ts_type_lib_worker(), initial_cwd: options.initial_cwd().to_path_buf(), is_inspecting: options.is_inspecting(), + is_npm_main: options.is_npm_main(), is_repl: matches!( options.sub_command(), DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) ), + cjs_tracker, code_cache, emitter, + fs, + in_npm_pkg_checker, main_module_graph_container, module_load_preparer, + node_code_translator, node_resolver, npm_resolver, npm_module_loader, @@ -259,19 +281,30 @@ impl CliModuleLoaderFactory { is_worker: bool, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { - lib, - is_worker, - parent_permissions, - permissions, + ) -> CreateModuleLoaderResult { + let module_loader = + Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { + lib, + is_worker, + is_npm_main: self.shared.is_npm_main, + parent_permissions, + permissions, + graph_container: graph_container.clone(), + node_code_translator: self.shared.node_code_translator.clone(), + emitter: self.shared.emitter.clone(), + parsed_source_cache: self.shared.parsed_source_cache.clone(), + shared: self.shared.clone(), + }))); + let node_require_loader = Rc::new(CliNodeRequireLoader::new( + self.shared.emitter.clone(), + self.shared.fs.clone(), graph_container, - emitter: self.shared.emitter.clone(), - parsed_source_cache: self.shared.parsed_source_cache.clone(), - shared: self.shared.clone(), - }))); - ModuleLoaderAndSourceMapGetter { - module_loader: loader, + self.shared.in_npm_pkg_checker.clone(), + self.shared.npm_resolver.clone(), + )); + CreateModuleLoaderResult { + module_loader, + node_require_loader, } } } @@ -280,7 +313,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { fn create_for_main( &self, root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( (*self.shared.main_module_graph_container).clone(), self.shared.lib_window, @@ -294,7 +327,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { &self, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( // create a fresh 
module graph for the worker WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( @@ -310,6 +343,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { struct CliModuleLoaderInner { lib: TsTypeLib, + is_npm_main: bool, is_worker: bool, /// The initial set of permissions used to resolve the static imports in the /// worker. These are "allow all" for main worker, and parent thread @@ -318,6 +352,7 @@ struct CliModuleLoaderInner { permissions: PermissionsContainer, shared: Arc, emitter: Arc, + node_code_translator: Arc, parsed_source_cache: Arc, graph_container: TGraphContainer, } @@ -331,24 +366,7 @@ impl maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, ) -> Result { - let code_source = match self.load_prepared_module(specifier).await? { - Some(code_source) => code_source, - None => { - if self.shared.npm_module_loader.if_in_npm_package(specifier) { - self - .shared - .npm_module_loader - .load(specifier, maybe_referrer) - .await? - } else { - let mut msg = format!("Loading unprepared module: {specifier}"); - if let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - return Err(anyhow!(msg)); - } - } - }; + let code_source = self.load_code_source(specifier, maybe_referrer).await?; let code = if self.shared.is_inspecting { // we need the code with the source map in order for // it to work with --inspect or --inspect-brk @@ -402,6 +420,29 @@ impl )) } + async fn load_code_source( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + ) -> Result { + if let Some(code_source) = self.load_prepared_module(specifier).await? { + return Ok(code_source); + } + if self.shared.node_resolver.in_npm_package(specifier) { + return self + .shared + .npm_module_loader + .load(specifier, maybe_referrer) + .await; + } + + let mut msg = format!("Loading unprepared module: {specifier}"); + if let Some(referrer) = maybe_referrer { + msg = format!("{}, imported from: {}", msg, referrer.as_str()); + } + Err(anyhow!(msg)) + } + fn resolve_referrer( &self, referrer: &str, @@ -474,15 +515,11 @@ impl if self.shared.is_repl { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return self - .shared - .node_resolver - .resolve_req_reference( - &reference, - referrer, - NodeResolutionMode::Execution, - ) - .map(|res| res.into_url()); + return self.shared.node_resolver.resolve_req_reference( + &reference, + referrer, + NodeResolutionMode::Execution, + ); } } @@ -506,13 +543,15 @@ impl .with_context(|| { format!("Could not resolve '{}'.", module.nv_reference) })? 
- .into_url() } Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(), Some(Module::Json(module)) => module.specifier.clone(), Some(Module::External(module)) => { - node::resolve_specifier_into_node_modules(&module.specifier) + node::resolve_specifier_into_node_modules( + &module.specifier, + self.shared.fs.as_ref(), + ) } None => specifier.into_owned(), }; @@ -534,7 +573,7 @@ impl }) => { let transpile_result = self .emitter - .emit_parsed_source(specifier, media_type, source) + .emit_parsed_source(specifier, media_type, ModuleKind::Esm, source) .await?; // at this point, we no longer need the parsed source in memory, so free it @@ -547,11 +586,19 @@ impl media_type, })) } + Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type, + source, + }) => self + .load_maybe_cjs(specifier, media_type, source) + .await + .map(Some), None => Ok(None), } } - fn load_prepared_module_sync( + fn load_prepared_module_for_source_map_sync( &self, specifier: &ModuleSpecifier, ) -> Result, AnyError> { @@ -564,9 +611,12 @@ impl media_type, source, }) => { - let transpile_result = self - .emitter - .emit_parsed_source_sync(specifier, media_type, source)?; + let transpile_result = self.emitter.emit_parsed_source_sync( + specifier, + media_type, + ModuleKind::Esm, + source, + )?; // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); @@ -578,6 +628,14 @@ impl media_type, })) } + Some(CodeOrDeferredEmit::Cjs { .. }) => { + self.parsed_source_cache.free(specifier); + + // todo(dsherret): to make this work, we should probably just + // rely on the CJS export cache. At the moment this is hard because + // cjs export analysis is only async + Ok(None) + } None => Ok(None), } } @@ -607,20 +665,40 @@ impl source, media_type, specifier, + is_script, .. })) => { + // todo(dsherret): revert in https://github.com/denoland/deno/pull/26439 + if self.is_npm_main && *is_script + || self.shared.cjs_tracker.is_cjs_with_known_is_script( + specifier, + *media_type, + *is_script, + )? 
+ { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } let code: ModuleCodeString = match media_type { MediaType::JavaScript | MediaType::Unknown - | MediaType::Cjs | MediaType::Mjs | MediaType::Json => source.clone().into(), MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { Default::default() } + MediaType::Cjs | MediaType::Cts => { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } MediaType::TypeScript | MediaType::Mts - | MediaType::Cts | MediaType::Jsx | MediaType::Tsx => { return Ok(Some(CodeOrDeferredEmit::DeferredEmit { @@ -629,7 +707,7 @@ impl source, })); } - MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { + MediaType::Css | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") } }; @@ -651,6 +729,48 @@ impl | None => Ok(None), } } + + async fn load_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + original_source: &Arc, + ) -> Result { + let js_source = if media_type.is_emittable() { + Cow::Owned( + self + .emitter + .emit_parsed_source( + specifier, + media_type, + ModuleKind::Cjs, + original_source, + ) + .await?, + ) + } else { + Cow::Borrowed(original_source.as_ref()) + }; + let text = self + .node_code_translator + .translate_cjs_to_esm(specifier, Some(js_source)) + .await?; + // at this point, we no longer need the parsed source in memory, so free it + self.parsed_source_cache.free(specifier); + Ok(ModuleCodeStringSource { + code: match text { + // perf: if the text is borrowed, that means it didn't make any changes + // to the original source, so we can just provide that instead of cloning + // the borrowed text + Cow::Borrowed(_) => { + ModuleSourceCode::String(original_source.clone().into()) + } + Cow::Owned(text) => ModuleSourceCode::String(text.into()), + }, + found_url: specifier.clone(), + media_type, + }) + } } enum CodeOrDeferredEmit<'a> { @@ -660,6 +780,11 @@ enum CodeOrDeferredEmit<'a> { media_type: MediaType, source: &'a Arc, }, + Cjs { + specifier: &'a ModuleSpecifier, + media_type: MediaType, + source: &'a Arc, + }, } // todo(dsherret): this double Rc boxing is not ideal @@ -821,7 +946,10 @@ impl ModuleLoader "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self.0.load_prepared_module_sync(&specifier).ok()??; + let source = self + .0 + .load_prepared_module_for_source_map_sync(&specifier) + .ok()??; source_map_from_code(source.code.as_bytes()) } @@ -900,3 +1028,79 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { drop(self.permit); // explicit drop for clarity } } + +#[derive(Debug)] +struct CliNodeRequireLoader { + emitter: Arc, + fs: Arc, + graph_container: TGraphContainer, + in_npm_pkg_checker: Arc, + npm_resolver: Arc, +} + +impl + CliNodeRequireLoader +{ + pub fn new( + emitter: Arc, + fs: Arc, + graph_container: TGraphContainer, + in_npm_pkg_checker: Arc, + npm_resolver: Arc, + ) -> Self { + Self { + emitter, + fs, + graph_container, + in_npm_pkg_checker, + npm_resolver, + } + } +} + +impl NodeRequireLoader + for CliNodeRequireLoader +{ + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + if let Ok(url) = deno_path_util::url_from_file_path(path) { + // allow reading if it's in the module graph + if self.graph_container.graph().get(&url).is_some() { + return 
Ok(std::borrow::Cow::Borrowed(path)); + } + } + self.npm_resolver.ensure_read_permission(permissions, path) + } + + fn load_text_file_lossy(&self, path: &Path) -> Result { + // todo(dsherret): use the preloaded module from the graph if available? + let media_type = MediaType::from_path(path); + let text = self.fs.read_text_file_lossy_sync(path, None)?; + if media_type.is_emittable() { + let specifier = deno_path_util::url_from_file_path(path)?; + if self.in_npm_pkg_checker.in_npm_package(&specifier) { + return Err( + NotSupportedKindInNpmError { + media_type, + specifier, + } + .into(), + ); + } + self.emitter.emit_parsed_source_sync( + &specifier, + media_type, + // this is probably not super accurate due to require esm, but probably ok. + // If we find this causes a lot of churn in the emit cache then we should + // investigate how we can make this better + ModuleKind::Cjs, + &text.into(), + ) + } else { + Ok(text) + } + } +} diff --git a/cli/node.rs b/cli/node.rs index 733d5f8717..1d410a726a 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -1,11 +1,14 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_graph::ParsedSourceStore; +use deno_path_util::url_from_file_path; +use deno_path_util::url_to_file_path; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; @@ -18,8 +21,8 @@ use serde::Serialize; use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; -use crate::resolver::CliNodeResolver; -use crate::util::fs::canonicalize_path_maybe_not_exists; +use crate::resolver::CjsTracker; +use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; pub type CliNodeCodeTranslator = NodeCodeTranslator; @@ -32,14 +35,14 @@ pub type CliNodeCodeTranslator = /// because the node_modules folder might not exist at that time. pub fn resolve_specifier_into_node_modules( specifier: &ModuleSpecifier, + fs: &dyn deno_fs::FileSystem, ) -> ModuleSpecifier { - specifier - .to_file_path() + url_to_file_path(specifier) .ok() // this path might not exist at the time the graph is being created // because the node_modules folder might not yet exist - .and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) - .and_then(|path| ModuleSpecifier::from_file_path(path).ok()) + .and_then(|path| canonicalize_path_maybe_not_exists_with_fs(&path, fs).ok()) + .and_then(|path| url_from_file_path(&path).ok()) .unwrap_or_else(|| specifier.clone()) } @@ -56,23 +59,29 @@ pub enum CliCjsAnalysis { pub struct CliCjsCodeAnalyzer { cache: NodeAnalysisCache, + cjs_tracker: Arc, fs: deno_fs::FileSystemRc, - node_resolver: Arc, parsed_source_cache: Option>, + // todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439 + // For example, this does not properly handle if cjs analysis was already done + // and has been cached. 
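// --- Illustrative sketch (not part of the patch) --------------------------
// `load_text_file_lossy` above now rejects `require()` of transpilable files
// inside npm packages and emits them as CJS everywhere else. "Transpilable"
// is `MediaType::is_emittable()` from deno_media_type 0.2; judging from the
// inline check this diff replaces later (cli/standalone/binary.rs), it covers
// at least the media types below. Test name is illustrative only.
#[test]
fn emittable_sketch() {
  use deno_ast::MediaType;
  // the kinds the replaced inline `matches!` in binary.rs listed explicitly
  for mt in [
    MediaType::TypeScript,
    MediaType::Mts,
    MediaType::Cts,
    MediaType::Jsx,
    MediaType::Tsx,
  ] {
    assert!(mt.is_emittable());
  }
  // plain JavaScript and JSON take the non-emit branch above and are
  // returned untouched
}
// ---------------------------------------------------------------------------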
+ is_npm_main: bool, } impl CliCjsCodeAnalyzer { pub fn new( cache: NodeAnalysisCache, + cjs_tracker: Arc, fs: deno_fs::FileSystemRc, - node_resolver: Arc, parsed_source_cache: Option>, + is_npm_main: bool, ) -> Self { Self { cache, + cjs_tracker, fs, - node_resolver, parsed_source_cache, + is_npm_main, } } @@ -88,7 +97,7 @@ impl CliCjsCodeAnalyzer { return Ok(analysis); } - let mut media_type = MediaType::from_specifier(specifier); + let media_type = MediaType::from_specifier(specifier); if media_type == MediaType::Json { return Ok(CliCjsAnalysis::Cjs { exports: vec![], @@ -96,62 +105,53 @@ impl CliCjsCodeAnalyzer { }); } - if media_type == MediaType::JavaScript { - if let Some(package_json) = - self.node_resolver.get_closest_package_json(specifier)? - { - match package_json.typ.as_str() { - "commonjs" => { - media_type = MediaType::Cjs; - } - "module" => { - media_type = MediaType::Mjs; - } - _ => {} - } - } - } + let cjs_tracker = self.cjs_tracker.clone(); + let is_npm_main = self.is_npm_main; + let is_maybe_cjs = + cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main; + let analysis = if is_maybe_cjs { + let maybe_parsed_source = self + .parsed_source_cache + .as_ref() + .and_then(|c| c.remove_parsed_source(specifier)); - let maybe_parsed_source = self - .parsed_source_cache - .as_ref() - .and_then(|c| c.remove_parsed_source(specifier)); - - let analysis = deno_core::unsync::spawn_blocking({ - let specifier = specifier.clone(); - let source: Arc = source.into(); - move || -> Result<_, deno_ast::ParseDiagnostic> { - let parsed_source = - maybe_parsed_source.map(Ok).unwrap_or_else(|| { - deno_ast::parse_program(deno_ast::ParseParams { - specifier, - text: source, - media_type, - capture_tokens: true, - scope_analysis: false, - maybe_syntax: None, + deno_core::unsync::spawn_blocking({ + let specifier = specifier.clone(); + let source: Arc = source.into(); + move || -> Result<_, AnyError> { + let parsed_source = + maybe_parsed_source.map(Ok).unwrap_or_else(|| { + deno_ast::parse_program(deno_ast::ParseParams { + specifier, + text: source, + media_type, + capture_tokens: true, + scope_analysis: false, + maybe_syntax: None, + }) + })?; + let is_script = parsed_source.compute_is_script(); + let is_cjs = cjs_tracker.is_cjs_with_known_is_script( + parsed_source.specifier(), + media_type, + is_script, + )? || is_script && is_npm_main; + if is_cjs { + let analysis = parsed_source.analyze_cjs(); + Ok(CliCjsAnalysis::Cjs { + exports: analysis.exports, + reexports: analysis.reexports, }) - })?; - if parsed_source.is_script() { - let analysis = parsed_source.analyze_cjs(); - Ok(CliCjsAnalysis::Cjs { - exports: analysis.exports, - reexports: analysis.reexports, - }) - } else if media_type == MediaType::Cjs { - // FIXME: `deno_ast` should internally handle MediaType::Cjs implying that - // the result must never be Esm - Ok(CliCjsAnalysis::Cjs { - exports: vec![], - reexports: vec![], - }) - } else { - Ok(CliCjsAnalysis::Esm) + } else { + Ok(CliCjsAnalysis::Esm) + } } - } - }) - .await - .unwrap()?; + }) + .await + .unwrap()? 
+ } else { + CliCjsAnalysis::Esm + }; self .cache @@ -163,11 +163,11 @@ impl CliCjsCodeAnalyzer { #[async_trait::async_trait(?Send)] impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { - async fn analyze_cjs( + async fn analyze_cjs<'a>( &self, specifier: &ModuleSpecifier, - source: Option, - ) -> Result { + source: Option>, + ) -> Result, AnyError> { let source = match source { Some(source) => source, None => { @@ -175,7 +175,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { if let Ok(source_from_file) = self.fs.read_text_file_lossy_async(path, None).await { - source_from_file + Cow::Owned(source_from_file) } else { return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports { exports: vec![], diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index fc095ab16f..4535b07fc5 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -10,8 +10,8 @@ use deno_core::serde_json; use deno_core::url::Url; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolverCreateOptions; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageReq; use node_resolver::NpmResolver; @@ -25,30 +25,14 @@ use super::InnerCliNpmResolverRef; use super::ResolvePkgFolderFromDenoReqError; pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions; -pub type CliByonmNpmResolver = ByonmNpmResolver; + ByonmNpmResolverCreateOptions; +pub type CliByonmNpmResolver = + ByonmNpmResolver; // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. #[derive(Debug)] struct CliByonmWrapper(Arc); -impl NodeRequireResolver for CliByonmWrapper { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result, AnyError> { - if !path - .components() - .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") - { - permissions.check_read_path(path) - } else { - Ok(Cow::Borrowed(path)) - } - } -} - impl NpmProcessStateProvider for CliByonmWrapper { fn get_npm_process_state(&self) -> String { serde_json::to_string(&NpmProcessState { @@ -67,10 +51,6 @@ impl CliNpmResolver for CliByonmNpmResolver { self } - fn into_require_resolver(self: Arc) -> Arc { - Arc::new(CliByonmWrapper(self)) - } - fn into_process_state_provider( self: Arc, ) -> Arc { @@ -100,6 +80,21 @@ impl CliNpmResolver for CliByonmNpmResolver { .map_err(ResolvePkgFolderFromDenoReqError::Byonm) } + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + if !path + .components() + .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") + { + permissions.check_read_path(path) + } else { + Ok(Cow::Borrowed(path)) + } + } + fn check_state_hash(&self) -> Option { // it is very difficult to determine the check state hash for byonm // so we just return None to signify check caching is not supported diff --git a/cli/npm/managed/cache/mod.rs b/cli/npm/managed/cache/mod.rs index aaec486681..8ae99f41e0 100644 --- a/cli/npm/managed/cache/mod.rs +++ b/cli/npm/managed/cache/mod.rs @@ -36,7 +36,7 @@ pub use tarball::TarballCache; /// Stores a single copy of npm packages in a cache. 
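// --- Illustrative sketch (not part of the patch) --------------------------
// The analyzer above leans on two pieces: `deno_ast::parse_program`, which
// accepts plain scripts as well as ES modules, and the new `CjsTracker`,
// which combines "this parsed as a script" with media-type/package.json
// information to reach a final CJS/ESM verdict. The deno_ast half of that
// flow, in isolation, looks roughly like this (assumes the `cjs` feature of
// deno_ast, which the CLI enables; the function name is illustrative).
fn sketch_cjs_exports(
  specifier: deno_ast::ModuleSpecifier,
  text: std::sync::Arc<str>,
) -> Result<Vec<String>, deno_ast::ParseDiagnostic> {
  let parsed = deno_ast::parse_program(deno_ast::ParseParams {
    specifier,
    text,
    media_type: deno_ast::MediaType::JavaScript,
    capture_tokens: true,
    scope_analysis: false,
    maybe_syntax: None,
  })?;
  // a "script" has no import/export syntax, so it may be CommonJS; the real
  // analyzer additionally asks CjsTracker before committing to a CJS analysis
  if parsed.compute_is_script() {
    Ok(parsed.analyze_cjs().exports)
  } else {
    Ok(Vec::new())
  }
}
// ---------------------------------------------------------------------------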
#[derive(Debug)] pub struct NpmCache { - cache_dir: NpmCacheDir, + cache_dir: Arc, cache_setting: CacheSetting, npmrc: Arc, /// ensures a package is only downloaded once per run @@ -45,7 +45,7 @@ pub struct NpmCache { impl NpmCache { pub fn new( - cache_dir: NpmCacheDir, + cache_dir: Arc, cache_setting: CacheSetting, npmrc: Arc, ) -> Self { @@ -61,6 +61,10 @@ impl NpmCache { &self.cache_setting } + pub fn root_dir_path(&self) -> &Path { + self.cache_dir.root_dir() + } + pub fn root_dir_url(&self) -> &Url { self.cache_dir.root_dir_url() } @@ -152,10 +156,6 @@ impl NpmCache { self.cache_dir.package_name_folder(name, registry_url) } - pub fn root_folder(&self) -> PathBuf { - self.cache_dir.root_dir().to_owned() - } - pub fn resolve_package_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index a0754812bb..4a91bc3474 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -12,6 +12,7 @@ use deno_cache_dir::npm::NpmCacheDir; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_core::url::Url; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmRegistryApi; @@ -24,12 +25,12 @@ use deno_npm::NpmSystemInfo; use deno_runtime::colors; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; +use node_resolver::InNpmPackageChecker; use node_resolver::NpmResolver; use resolution::AddPkgReqsResult; @@ -39,7 +40,6 @@ use crate::args::NpmInstallDepsProvider; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; use crate::args::PackageJsonDepValueParseWithLocationError; -use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::FastInsecureHasher; use crate::http_util::HttpClientProvider; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; @@ -66,12 +66,12 @@ pub enum CliNpmResolverManagedSnapshotOption { Specified(Option), } -pub struct CliNpmResolverManagedCreateOptions { +pub struct CliManagedNpmResolverCreateOptions { pub snapshot: CliNpmResolverManagedSnapshotOption, pub maybe_lockfile: Option>, pub fs: Arc, pub http_client_provider: Arc, - pub npm_global_cache_dir: PathBuf, + pub npm_cache_dir: Arc, pub cache_setting: crate::args::CacheSetting, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub maybe_node_modules_path: Option, @@ -82,7 +82,7 @@ pub struct CliNpmResolverManagedCreateOptions { } pub async fn create_managed_npm_resolver_for_lsp( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Arc { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -115,7 +115,7 @@ pub async fn create_managed_npm_resolver_for_lsp( } pub async fn create_managed_npm_resolver( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Result, AnyError> { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -189,20 +189,16 @@ fn create_inner( )) } -fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc { +fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc { 
Arc::new(NpmCache::new( - NpmCacheDir::new( - &DenoCacheEnvFsAdapter(options.fs.as_ref()), - options.npm_global_cache_dir.clone(), - options.npmrc.get_all_known_registries_urls(), - ), + options.npm_cache_dir.clone(), options.cache_setting.clone(), options.npmrc.clone(), )) } fn create_api( - options: &CliNpmResolverManagedCreateOptions, + options: &CliManagedNpmResolverCreateOptions, npm_cache: Arc, ) -> Arc { Arc::new(CliNpmRegistryApi::new( @@ -259,6 +255,35 @@ async fn snapshot_from_lockfile( Ok(snapshot) } +#[derive(Debug)] +struct ManagedInNpmPackageChecker { + root_dir: Url, +} + +impl InNpmPackageChecker for ManagedInNpmPackageChecker { + fn in_npm_package(&self, specifier: &Url) -> bool { + specifier.as_ref().starts_with(self.root_dir.as_str()) + } +} + +pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> { + pub root_cache_dir_url: &'a Url, + pub maybe_node_modules_path: Option<&'a Path>, +} + +pub fn create_managed_in_npm_pkg_checker( + options: CliManagedInNpmPkgCheckerCreateOptions, +) -> Arc { + let root_dir = match options.maybe_node_modules_path { + Some(node_modules_folder) => { + deno_path_util::url_from_directory_path(node_modules_folder).unwrap() + } + None => options.root_cache_dir_url.clone(), + }; + debug_assert!(root_dir.as_str().ends_with('/')); + Arc::new(ManagedInNpmPackageChecker { root_dir }) +} + /// An npm resolver where the resolution is managed by Deno rather than /// the user bringing their own node_modules (BYONM) on the file system. pub struct ManagedCliNpmResolver { @@ -555,8 +580,16 @@ impl ManagedCliNpmResolver { .map_err(|err| err.into()) } - pub fn global_cache_root_folder(&self) -> PathBuf { - self.npm_cache.root_folder() + pub fn maybe_node_modules_path(&self) -> Option<&Path> { + self.fs_resolver.node_modules_path() + } + + pub fn global_cache_root_path(&self) -> &Path { + self.npm_cache.root_dir_path() + } + + pub fn global_cache_root_url(&self) -> &Url { + self.npm_cache.root_dir_url() } } @@ -591,22 +624,6 @@ impl NpmResolver for ManagedCliNpmResolver { log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); Ok(path) } - - fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - let root_dir_url = self.fs_resolver.root_dir_url(); - debug_assert!(root_dir_url.as_str().ends_with('/')); - specifier.as_ref().starts_with(root_dir_url.as_str()) - } -} - -impl NodeRequireResolver for ManagedCliNpmResolver { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result, AnyError> { - self.fs_resolver.ensure_read_permission(permissions, path) - } } impl NpmProcessStateProvider for ManagedCliNpmResolver { @@ -623,10 +640,6 @@ impl CliNpmResolver for ManagedCliNpmResolver { self } - fn into_require_resolver(self: Arc) -> Arc { - self - } - fn into_process_state_provider( self: Arc, ) -> Arc { @@ -687,6 +700,14 @@ impl CliNpmResolver for ManagedCliNpmResolver { .map_err(ResolvePkgFolderFromDenoReqError::Managed) } + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + self.fs_resolver.ensure_read_permission(permissions, path) + } + fn check_state_hash(&self) -> Option { // We could go further and check all the individual // npm packages, but that's probably overkill. 
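// --- Illustrative sketch (not part of the patch) --------------------------
// The new `ManagedInNpmPackageChecker` above decides "is this specifier inside
// an npm package?" with a plain URL-prefix test against the managed root
// (the local node_modules directory or the global npm cache root). The
// trailing slash guarded by the `debug_assert!` is what keeps a sibling
// directory such as `node_modules_backup/` from matching. A minimal
// standalone version using only the `url` crate (names are illustrative):
fn is_under_root(root_dir: &url::Url, specifier: &url::Url) -> bool {
  debug_assert!(root_dir.as_str().ends_with('/'));
  specifier.as_str().starts_with(root_dir.as_str())
}

#[test]
fn prefix_check_sketch() {
  let root = url::Url::parse("file:///proj/node_modules/").unwrap();
  let inside =
    url::Url::parse("file:///proj/node_modules/foo/index.js").unwrap();
  let outside =
    url::Url::parse("file:///proj/node_modules_backup/foo.js").unwrap();
  assert!(is_under_root(&root, &inside));
  assert!(!is_under_root(&root, &outside));
}
// ---------------------------------------------------------------------------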
diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index 867bb4168a..590f8fb25c 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -17,7 +17,6 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::StreamExt; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -30,9 +29,6 @@ use crate::npm::managed::cache::TarballCache; /// Part of the resolution that interacts with the file system. #[async_trait(?Send)] pub trait NpmPackageFsResolver: Send + Sync { - /// Specifier for the root directory. - fn root_dir_url(&self) -> &Url; - /// The local node_modules folder if it is applicable to the implementation. fn node_modules_path(&self) -> Option<&Path>; diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index 5be315e992..f0193e78e9 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -11,7 +11,6 @@ use crate::colors; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -56,7 +55,7 @@ impl GlobalNpmPackageResolver { Self { registry_read_permission_checker: RegistryReadPermissionChecker::new( fs, - cache.root_folder(), + cache.root_dir_path().to_path_buf(), ), cache, tarball_cache, @@ -69,10 +68,6 @@ impl GlobalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for GlobalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - self.cache.root_dir_url() - } - fn node_modules_path(&self) -> Option<&Path> { None } diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 54f7576ade..0968be8a7e 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -155,10 +155,6 @@ impl LocalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for LocalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - &self.root_node_modules_url - } - fn node_modules_path(&self) -> Option<&Path> { Some(self.root_node_modules_path.as_ref()) } diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 3971b1a0b3..0d434ca27f 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -4,6 +4,7 @@ mod byonm; mod common; mod managed; +use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -15,13 +16,16 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; +use deno_resolver::npm::ByonmInNpmPackageChecker; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; -use deno_runtime::deno_node::NodeRequireResolver; +use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use managed::cache::registry_info::get_package_url; +use managed::create_managed_in_npm_pkg_checker; +use node_resolver::InNpmPackageChecker; use node_resolver::NpmResolver; use thiserror::Error; @@ -29,7 +33,8 @@ use crate::file_fetcher::FileFetcher; pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolverCreateOptions; -pub use self::managed::CliNpmResolverManagedCreateOptions; +pub use 
self::managed::CliManagedInNpmPkgCheckerCreateOptions; +pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::ManagedCliNpmResolver; @@ -42,7 +47,7 @@ pub enum ResolvePkgFolderFromDenoReqError { } pub enum CliNpmResolverCreateOptions { - Managed(CliNpmResolverManagedCreateOptions), + Managed(CliManagedNpmResolverCreateOptions), Byonm(CliByonmNpmResolverCreateOptions), } @@ -68,6 +73,22 @@ pub async fn create_cli_npm_resolver( } } +pub enum CreateInNpmPkgCheckerOptions<'a> { + Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>), + Byonm, +} + +pub fn create_in_npm_pkg_checker( + options: CreateInNpmPkgCheckerOptions, +) -> Arc { + match options { + CreateInNpmPkgCheckerOptions::Managed(options) => { + create_managed_in_npm_pkg_checker(options) + } + CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker), + } +} + pub enum InnerCliNpmResolverRef<'a> { Managed(&'a ManagedCliNpmResolver), #[allow(dead_code)] @@ -76,7 +97,6 @@ pub enum InnerCliNpmResolverRef<'a> { pub trait CliNpmResolver: NpmResolver { fn into_npm_resolver(self: Arc) -> Arc; - fn into_require_resolver(self: Arc) -> Arc; fn into_process_state_provider( self: Arc, ) -> Arc; @@ -107,6 +127,12 @@ pub trait CliNpmResolver: NpmResolver { referrer: &ModuleSpecifier, ) -> Result; + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError>; + /// Returns a hash returning the state of the npm resolver /// or `None` if the state currently can't be determined. fn check_state_hash(&self) -> Option; diff --git a/cli/resolver.rs b/cli/resolver.rs index 84c671268a..710b975093 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -4,6 +4,7 @@ use async_trait::async_trait; use dashmap::DashMap; use dashmap::DashSet; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; @@ -11,6 +12,7 @@ use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::url::Url; use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; use deno_graph::source::ResolutionMode; @@ -29,6 +31,7 @@ use deno_runtime::deno_fs; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::is_builtin_node_module; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use node_resolver::errors::ClosestPkgJsonError; @@ -38,21 +41,22 @@ use node_resolver::errors::PackageFolderResolveErrorKind; use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageResolveErrorKind; -use node_resolver::errors::UrlToNodeResolutionError; +use node_resolver::errors::PackageSubpathResolveError; +use node_resolver::InNpmPackageChecker; use node_resolver::NodeModuleKind; use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; -use node_resolver::PackageJson; +use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; use crate::args::JsxImportSourceConfig; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; use 
crate::npm::InnerCliNpmResolverRef; -use crate::util::path::specifier_has_extension; use crate::util::sync::AtomicFlag; use crate::util::text_encoding::from_utf8_lossy_owned; @@ -104,36 +108,32 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { #[derive(Debug)] pub struct CliNodeResolver { - cjs_resolutions: Arc, + cjs_tracker: Arc, fs: Arc, + in_npm_pkg_checker: Arc, node_resolver: Arc, npm_resolver: Arc, } impl CliNodeResolver { pub fn new( - cjs_resolutions: Arc, + cjs_tracker: Arc, fs: Arc, + in_npm_pkg_checker: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { - cjs_resolutions, + cjs_tracker, fs, + in_npm_pkg_checker, node_resolver, npm_resolver, } } pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(specifier) - } - - pub fn get_closest_package_json( - &self, - referrer: &ModuleSpecifier, - ) -> Result>, ClosestPkgJsonError> { - self.node_resolver.get_closest_package_json(referrer) + self.in_npm_pkg_checker.in_npm_package(specifier) } pub fn resolve_if_for_npm_pkg( @@ -153,8 +153,7 @@ impl CliNodeResolver { | NodeResolveErrorKind::UnsupportedEsmUrlScheme(_) | NodeResolveErrorKind::DataUrlReferrer(_) | NodeResolveErrorKind::TypesNotFound(_) - | NodeResolveErrorKind::FinalizeResolution(_) - | NodeResolveErrorKind::UrlToNodeResolution(_) => Err(err.into()), + | NodeResolveErrorKind::FinalizeResolution(_) => Err(err.into()), NodeResolveErrorKind::PackageResolve(err) => { let err = err.into_kind(); match err { @@ -216,7 +215,11 @@ impl CliNodeResolver { referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result { - let referrer_kind = if self.cjs_resolutions.is_known_cjs(referrer) { + let referrer_kind = if self + .cjs_tracker + .is_maybe_cjs(referrer, MediaType::from_specifier(referrer)) + .map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))? 
+ { NodeModuleKind::Cjs } else { NodeModuleKind::Esm @@ -226,7 +229,7 @@ impl CliNodeResolver { self .node_resolver .resolve(specifier, referrer, referrer_kind, mode)?; - Ok(self.handle_node_resolution(res)) + Ok(res) } pub fn resolve_req_reference( @@ -234,7 +237,7 @@ impl CliNodeResolver { req_ref: &NpmPackageReqReference, referrer: &ModuleSpecifier, mode: NodeResolutionMode, - ) -> Result { + ) -> Result { self.resolve_req_with_sub_path( req_ref.req(), req_ref.sub_path(), @@ -249,7 +252,7 @@ impl CliNodeResolver { sub_path: Option<&str>, referrer: &ModuleSpecifier, mode: NodeResolutionMode, - ) -> Result { + ) -> Result { let package_folder = self .npm_resolver .resolve_pkg_folder_from_deno_module_req(req, referrer)?; @@ -260,7 +263,7 @@ impl CliNodeResolver { mode, ); match resolution_result { - Ok(resolution) => Ok(resolution), + Ok(url) => Ok(url), Err(err) => { if self.npm_resolver.as_byonm().is_some() { let package_json_path = package_folder.join("package.json"); @@ -271,7 +274,7 @@ impl CliNodeResolver { )); } } - Err(err) + Err(err.into()) } } } @@ -282,16 +285,13 @@ impl CliNodeResolver { sub_path: Option<&str>, maybe_referrer: Option<&ModuleSpecifier>, mode: NodeResolutionMode, - ) -> Result { - let res = self - .node_resolver - .resolve_package_subpath_from_deno_module( - package_folder, - sub_path, - maybe_referrer, - mode, - )?; - Ok(self.handle_node_resolution(res)) + ) -> Result { + self.node_resolver.resolve_package_subpath_from_deno_module( + package_folder, + sub_path, + maybe_referrer, + mode, + ) } pub fn handle_if_in_node_modules( @@ -306,71 +306,45 @@ impl CliNodeResolver { // so canoncalize then check if it's in the node_modules directory. // If so, check if we need to store this specifier as being a CJS // resolution. - let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.in_npm_package(&specifier) { - let resolution = - self.node_resolver.url_to_node_resolution(specifier)?; - let resolution = self.handle_node_resolution(resolution); - return Ok(Some(resolution.into_url())); - } + let specifier = crate::node::resolve_specifier_into_node_modules( + specifier, + self.fs.as_ref(), + ); + return Ok(Some(specifier)); } Ok(None) } - - pub fn url_to_node_resolution( - &self, - specifier: ModuleSpecifier, - ) -> Result { - self.node_resolver.url_to_node_resolution(specifier) - } - - fn handle_node_resolution( - &self, - resolution: NodeResolution, - ) -> NodeResolution { - if let NodeResolution::CommonJs(specifier) = &resolution { - // remember that this was a common js resolution - self.mark_cjs_resolution(specifier.clone()); - } - resolution - } - - pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) { - self.cjs_resolutions.insert(specifier); - } } -// todo(dsherret): move to module_loader.rs +#[derive(Debug, Error)] +#[error("{media_type} files are not supported in npm packages: {specifier}")] +pub struct NotSupportedKindInNpmError { + pub media_type: MediaType, + pub specifier: Url, +} + +// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone) #[derive(Clone)] pub struct NpmModuleLoader { - cjs_resolutions: Arc, - node_code_translator: Arc, + cjs_tracker: Arc, fs: Arc, - node_resolver: Arc, + node_code_translator: Arc, } impl NpmModuleLoader { pub fn new( - cjs_resolutions: Arc, - node_code_translator: Arc, + cjs_tracker: Arc, fs: Arc, - node_resolver: Arc, + node_code_translator: Arc, ) -> Self { Self { - cjs_resolutions, + cjs_tracker, node_code_translator, fs, - node_resolver, } } - 
pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.node_resolver.in_npm_package(specifier) - || self.cjs_resolutions.is_known_cjs(specifier) - } - pub async fn load( &self, specifier: &ModuleSpecifier, @@ -413,20 +387,30 @@ impl NpmModuleLoader { } })?; - let code = if self.cjs_resolutions.is_known_cjs(specifier) { + let media_type = MediaType::from_specifier(specifier); + if media_type.is_emittable() { + return Err(AnyError::from(NotSupportedKindInNpmError { + media_type, + specifier: specifier.clone(), + })); + } + + let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? { // translate cjs to esm if it's cjs and inject node globals let code = from_utf8_lossy_owned(code); ModuleSourceCode::String( self .node_code_translator - .translate_cjs_to_esm(specifier, Some(code)) + .translate_cjs_to_esm(specifier, Some(Cow::Owned(code))) .await? + .into_owned() .into(), ) } else { // esm and json code is untouched ModuleSourceCode::Bytes(code.into_boxed_slice().into()) }; + Ok(ModuleCodeStringSource { code, found_url: specifier.clone(), @@ -435,21 +419,165 @@ impl NpmModuleLoader { } } +pub struct CjsTrackerOptions { + pub unstable_detect_cjs: bool, +} + /// Keeps track of what module specifiers were resolved as CJS. -#[derive(Debug, Default)] -pub struct CjsResolutionStore(DashSet); +/// +/// Modules that are `.js` or `.ts` are only known to be CJS or +/// ESM after they're loaded based on their contents. So these files +/// will be "maybe CJS" until they're loaded. +#[derive(Debug)] +pub struct CjsTracker { + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, + unstable_detect_cjs: bool, + known: DashMap, +} -impl CjsResolutionStore { - pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool { - if specifier.scheme() != "file" { - return false; +impl CjsTracker { + pub fn new( + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, + options: CjsTrackerOptions, + ) -> Self { + Self { + in_npm_pkg_checker, + pkg_json_resolver, + unstable_detect_cjs: options.unstable_detect_cjs, + known: Default::default(), } - - specifier_has_extension(specifier, "cjs") || self.0.contains(specifier) } - pub fn insert(&self, specifier: ModuleSpecifier) { - self.0.insert(specifier); + /// Checks whether the file might be treated as CJS, but it's not for sure + /// yet because the source hasn't been loaded to see whether it contains + /// imports or exports. + pub fn is_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + ) -> Result { + self.treat_as_cjs_with_is_script(specifier, media_type, None) + } + + /// Gets whether the file is CJS. If true, this is for sure + /// cjs because `is_script` is provided. + /// + /// `is_script` should be `true` when the contents of the file at the + /// provided specifier are known to be a script and not an ES module. 
+ pub fn is_cjs_with_known_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: bool, + ) -> Result { + self.treat_as_cjs_with_is_script(specifier, media_type, Some(is_script)) + } + + fn treat_as_cjs_with_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: Option, + ) -> Result { + let kind = match self + .get_known_kind_with_is_script(specifier, media_type, is_script) + { + Some(kind) => kind, + None => self.check_based_on_pkg_json(specifier)?, + }; + Ok(kind.is_cjs()) + } + + pub fn get_known_kind( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + ) -> Option { + self.get_known_kind_with_is_script(specifier, media_type, None) + } + + fn get_known_kind_with_is_script( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + is_script: Option, + ) -> Option { + if specifier.scheme() != "file" { + return Some(ModuleKind::Esm); + } + + match media_type { + MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm), + MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs), + MediaType::Dts => { + // dts files are always determined based on the package.json because + // they contain imports/exports even when considered CJS + if let Some(value) = self.known.get(specifier).map(|v| *v) { + Some(value) + } else { + let value = self.check_based_on_pkg_json(specifier).ok(); + if let Some(value) = value { + self.known.insert(specifier.clone(), value); + } + Some(value.unwrap_or(ModuleKind::Esm)) + } + } + MediaType::Wasm | + MediaType::Json => Some(ModuleKind::Esm), + MediaType::JavaScript + | MediaType::Jsx + | MediaType::TypeScript + | MediaType::Tsx + // treat these as unknown + | MediaType::Css + | MediaType::SourceMap + | MediaType::Unknown => { + if let Some(value) = self.known.get(specifier).map(|v| *v) { + if value.is_cjs() && is_script == Some(false) { + // we now know this is actually esm + self.known.insert(specifier.clone(), ModuleKind::Esm); + Some(ModuleKind::Esm) + } else { + Some(value) + } + } else if is_script == Some(false) { + // we know this is esm + self.known.insert(specifier.clone(), ModuleKind::Esm); + Some(ModuleKind::Esm) + } else { + None + } + } + } + } + + fn check_based_on_pkg_json( + &self, + specifier: &ModuleSpecifier, + ) -> Result { + if self.in_npm_pkg_checker.in_npm_package(specifier) { + if let Some(pkg_json) = + self.pkg_json_resolver.get_closest_package_json(specifier)? + { + let is_file_location_cjs = pkg_json.typ != "module"; + Ok(ModuleKind::from_is_cjs(is_file_location_cjs)) + } else { + Ok(ModuleKind::Cjs) + } + } else if self.unstable_detect_cjs { + if let Some(pkg_json) = + self.pkg_json_resolver.get_closest_package_json(specifier)? + { + let is_cjs_type = pkg_json.typ == "commonjs"; + Ok(ModuleKind::from_is_cjs(is_cjs_type)) + } else { + Ok(ModuleKind::Esm) + } + } else { + Ok(ModuleKind::Esm) + } } } @@ -633,8 +761,7 @@ impl Resolver for CliGraphResolver { Some(referrer), to_node_mode(mode), ) - .map_err(ResolveError::Other) - .map(|res| res.into_url()), + .map_err(|e| ResolveError::Other(e.into())), MappedResolution::PackageJson { dep_result, alias, @@ -665,19 +792,17 @@ impl Resolver for CliGraphResolver { ) .map_err(|e| ResolveError::Other(e.into())) .and_then(|pkg_folder| { - Ok( - self - .node_resolver - .as_ref() - .unwrap() - .resolve_package_sub_path_from_deno_module( - pkg_folder, - sub_path.as_deref(), - Some(referrer), - to_node_mode(mode), - )? 
- .into_url(), - ) + self + .node_resolver + .as_ref() + .unwrap() + .resolve_package_sub_path_from_deno_module( + pkg_folder, + sub_path.as_deref(), + Some(referrer), + to_node_mode(mode), + ) + .map_err(|e| ResolveError::Other(e.into())) }), }) } @@ -717,23 +842,20 @@ impl Resolver for CliGraphResolver { npm_req_ref.req(), ) { - return Ok( - node_resolver - .resolve_package_sub_path_from_deno_module( - pkg_folder, - npm_req_ref.sub_path(), - Some(referrer), - to_node_mode(mode), - )? - .into_url(), - ); + return node_resolver + .resolve_package_sub_path_from_deno_module( + pkg_folder, + npm_req_ref.sub_path(), + Some(referrer), + to_node_mode(mode), + ) + .map_err(|e| ResolveError::Other(e.into())); } // do npm resolution for byonm if is_byonm { return node_resolver .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode)) - .map(|res| res.into_url()) .map_err(|err| err.into()); } } @@ -751,9 +873,7 @@ impl Resolver for CliGraphResolver { .map_err(ResolveError::Other)?; if let Some(res) = maybe_resolution { match res { - NodeResolution::Esm(url) | NodeResolution::CommonJs(url) => { - return Ok(url) - } + NodeResolution::Module(url) => return Ok(url), NodeResolution::BuiltIn(_) => { // don't resolve bare specifiers for built-in modules via node resolution } diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index f41f3003ff..9e26512268 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -21,6 +21,7 @@ use std::process::Command; use std::sync::Arc; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::ResolverWorkspaceJsrPackage; @@ -67,6 +68,7 @@ use crate::file_fetcher::FileFetcher; use crate::http_util::HttpClientProvider; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; +use crate::resolver::CjsTracker; use crate::shared::ReleaseChannel; use crate::standalone::virtual_fs::VfsEntry; use crate::util::archive; @@ -257,6 +259,10 @@ impl StandaloneModules { } } + pub fn has_file(&self, path: &Path) -> bool { + self.vfs.file_entry(path).is_ok() + } + pub fn read<'a>( &'a self, specifier: &'a ModuleSpecifier, @@ -353,6 +359,7 @@ pub fn extract_standalone( } pub struct DenoCompileBinaryWriter<'a> { + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, emitter: &'a Emitter, file_fetcher: &'a FileFetcher, @@ -365,6 +372,7 @@ pub struct DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> { #[allow(clippy::too_many_arguments)] pub fn new( + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, emitter: &'a Emitter, file_fetcher: &'a FileFetcher, @@ -374,6 +382,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { npm_system_info: NpmSystemInfo, ) -> Self { Self { + cjs_tracker, deno_dir, emitter, file_fetcher, @@ -599,19 +608,21 @@ impl<'a> DenoCompileBinaryWriter<'a> { } let (maybe_source, media_type) = match module { deno_graph::Module::Js(m) => { - // todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable() - let is_emittable = matches!( - m.media_type, - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Jsx - | MediaType::Tsx - ); - let source = if is_emittable { + let source = if m.media_type.is_emittable() { + let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( + &m.specifier, + m.media_type, + m.is_script, + )?; + let module_kind = ModuleKind::from_is_cjs(is_cjs); let source = self .emitter - .emit_parsed_source(&m.specifier, m.media_type, &m.source) + 
.emit_parsed_source( + &m.specifier, + m.media_type, + module_kind, + &m.source, + ) .await?; source.into_bytes() } else { @@ -745,8 +756,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { } else { // DO NOT include the user's registry url as it may contain credentials, // but also don't make this dependent on the registry url - let global_cache_root_path = npm_resolver.global_cache_root_folder(); - let mut builder = VfsBuilder::new(global_cache_root_path)?; + let global_cache_root_path = npm_resolver.global_cache_root_path(); + let mut builder = + VfsBuilder::new(global_cache_root_path.to_path_buf())?; let mut packages = npm_resolver.all_system_packages(&self.npm_system_info); packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 3a62b6ff96..85610f4c20 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -19,6 +19,7 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::v8_set_flags; +use deno_core::FastString; use deno_core::FeatureChecker; use deno_core::ModuleLoader; use deno_core::ModuleSourceCode; @@ -30,7 +31,9 @@ use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonDepValue; use deno_runtime::deno_fs; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -43,6 +46,7 @@ use deno_semver::npm::NpmPackageReqReference; use import_map::parse_from_json; use node_resolver::analyze::NodeCodeTranslator; use node_resolver::NodeResolutionMode; +use serialization::DenoCompileModuleSource; use std::borrow::Cow; use std::rc::Rc; use std::sync::Arc; @@ -61,12 +65,18 @@ use crate::cache::NodeAnalysisCache; use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use crate::node::CliCjsCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; +use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; +use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNodeResolver; use crate::resolver::NpmModuleLoader; @@ -75,7 +85,7 @@ use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; -use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; pub mod binary; @@ -91,10 +101,14 @@ use self::binary::Metadata; use self::file_system::DenoCompileFileSystem; struct SharedModuleLoaderState { + cjs_tracker: Arc, + fs: Arc, modules: StandaloneModules, - workspace_resolver: WorkspaceResolver, + node_code_translator: Arc, node_resolver: Arc, npm_module_loader: Arc, + npm_resolver: Arc, + workspace_resolver: 
WorkspaceResolver, } #[derive(Clone)] @@ -102,6 +116,12 @@ struct EmbeddedModuleLoader { shared: Arc, } +impl std::fmt::Debug for EmbeddedModuleLoader { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EmbeddedModuleLoader").finish() + } +} + pub const MODULE_NOT_FOUND: &str = "Module not found"; pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; @@ -159,8 +179,7 @@ impl ModuleLoader for EmbeddedModuleLoader { sub_path.as_deref(), Some(&referrer), NodeResolutionMode::Execution, - )? - .into_url(), + )?, ), Ok(MappedResolution::PackageJson { dep_result, @@ -168,16 +187,14 @@ impl ModuleLoader for EmbeddedModuleLoader { alias, .. }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? { - PackageJsonDepValue::Req(req) => self - .shared - .node_resolver - .resolve_req_with_sub_path( + PackageJsonDepValue::Req(req) => { + self.shared.node_resolver.resolve_req_with_sub_path( req, sub_path.as_deref(), &referrer, NodeResolutionMode::Execution, ) - .map(|res| res.into_url()), + } PackageJsonDepValue::Workspace(version_req) => { let pkg_folder = self .shared @@ -195,8 +212,7 @@ impl ModuleLoader for EmbeddedModuleLoader { sub_path.as_deref(), Some(&referrer), NodeResolutionMode::Execution, - )? - .into_url(), + )?, ) } }, @@ -205,15 +221,11 @@ impl ModuleLoader for EmbeddedModuleLoader { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return self - .shared - .node_resolver - .resolve_req_reference( - &reference, - &referrer, - NodeResolutionMode::Execution, - ) - .map(|res| res.into_url()); + return self.shared.node_resolver.resolve_req_reference( + &reference, + &referrer, + NodeResolutionMode::Execution, + ); } if specifier.scheme() == "jsr" { @@ -317,17 +329,72 @@ impl ModuleLoader for EmbeddedModuleLoader { match self.shared.modules.read(original_specifier) { Ok(Some(module)) => { + let media_type = module.media_type; let (module_specifier, module_type, module_source) = - module.into_for_v8(); - deno_core::ModuleLoadResponse::Sync(Ok( - deno_core::ModuleSource::new_with_redirect( - module_type, - module_source, - original_specifier, - module_specifier, - None, - ), - )) + module.into_parts(); + let is_maybe_cjs = match self + .shared + .cjs_tracker + .is_maybe_cjs(original_specifier, media_type) + { + Ok(is_maybe_cjs) => is_maybe_cjs, + Err(err) => { + return deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{:?}", err), + ))); + } + }; + if is_maybe_cjs { + let original_specifier = original_specifier.clone(); + let module_specifier = module_specifier.clone(); + let shared = self.shared.clone(); + deno_core::ModuleLoadResponse::Async( + async move { + let source = match module_source { + DenoCompileModuleSource::String(string) => { + Cow::Borrowed(string) + } + DenoCompileModuleSource::Bytes(module_code_bytes) => { + match module_code_bytes { + Cow::Owned(bytes) => Cow::Owned( + crate::util::text_encoding::from_utf8_lossy_owned(bytes), + ), + Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), + } + } + }; + let source = shared + .node_code_translator + .translate_cjs_to_esm(&module_specifier, Some(source)) + .await?; + let module_source = match source { + Cow::Owned(source) => ModuleSourceCode::String(source.into()), + Cow::Borrowed(source) => { + ModuleSourceCode::String(FastString::from_static(source)) + } + }; + Ok(deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + &original_specifier, + &module_specifier, + None, + )) + } + .boxed_local(), + ) + } else { 
+ let module_source = module_source.into_for_v8(); + deno_core::ModuleLoadResponse::Sync(Ok( + deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + original_specifier, + module_specifier, + None, + ), + )) + } } Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error( format!("{MODULE_NOT_FOUND}: {}", original_specifier), @@ -339,32 +406,61 @@ impl ModuleLoader for EmbeddedModuleLoader { } } +impl NodeRequireLoader for EmbeddedModuleLoader { + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a std::path::Path, + ) -> Result, AnyError> { + if self.shared.modules.has_file(path) { + // allow reading if the file is in the snapshot + return Ok(Cow::Borrowed(path)); + } + + self + .shared + .npm_resolver + .ensure_read_permission(permissions, path) + } + + fn load_text_file_lossy( + &self, + path: &std::path::Path, + ) -> Result { + Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?) + } +} + struct StandaloneModuleLoaderFactory { shared: Arc, } +impl StandaloneModuleLoaderFactory { + pub fn create_result(&self) -> CreateModuleLoaderResult { + let loader = Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + }); + CreateModuleLoaderResult { + module_loader: loader.clone(), + node_require_loader: loader, + } + } +} + impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { fn create_for_main( &self, _root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - ModuleLoaderAndSourceMapGetter { - module_loader: Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - }), - } + ) -> CreateModuleLoaderResult { + self.create_result() } fn create_for_worker( &self, _parent_permissions: PermissionsContainer, _permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - ModuleLoaderAndSourceMapGetter { - module_loader: Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - }), - } + ) -> CreateModuleLoaderResult { + self.create_result() } } @@ -410,106 +506,155 @@ pub async fn run(data: StandaloneData) -> Result { let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); let cache_setting = CacheSetting::Only; - let npm_resolver = match metadata.node_modules { + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { Some(binary::NodeModules::Managed { node_modules_dir }) => { + // create an npmrc that uses the fake npm_registry_url to resolve packages + let npmrc = Arc::new(ResolvedNpmRc { + default_config: deno_npm::npm_rc::RegistryConfigWithUrl { + registry_url: npm_registry_url.clone(), + config: Default::default(), + }, + scopes: Default::default(), + registry_configs: Default::default(), + }); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); let snapshot = npm_snapshot.unwrap(); let maybe_node_modules_path = node_modules_dir .map(|node_modules_dir| root_path.join(node_modules_dir)); - create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { - snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( - snapshot, - )), - maybe_lockfile: None, - fs: fs.clone(), - http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, - 
cache_setting, - text_only_progress_bar: progress_bar, - maybe_node_modules_path, - npm_system_info: Default::default(), - npm_install_deps_provider: Arc::new( - // this is only used for installing packages, which isn't necessary with deno compile - NpmInstallDepsProvider::empty(), - ), - // create an npmrc that uses the fake npm_registry_url to resolve packages - npmrc: Arc::new(ResolvedNpmRc { - default_config: deno_npm::npm_rc::RegistryConfigWithUrl { - registry_url: npm_registry_url.clone(), - config: Default::default(), - }, - scopes: Default::default(), - registry_configs: Default::default(), - }), - lifecycle_scripts: Default::default(), - }, - )) - .await? + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: maybe_node_modules_path.as_deref(), + }, + )); + let npm_resolver = + create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( + CliManagedNpmResolverCreateOptions { + snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( + snapshot, + )), + maybe_lockfile: None, + fs: fs.clone(), + http_client_provider: http_client_provider.clone(), + npm_cache_dir, + cache_setting, + text_only_progress_bar: progress_bar, + maybe_node_modules_path, + npm_system_info: Default::default(), + npm_install_deps_provider: Arc::new( + // this is only used for installing packages, which isn't necessary with deno compile + NpmInstallDepsProvider::empty(), + ), + npmrc, + lifecycle_scripts: Default::default(), + }, + )) + .await?; + (in_npm_pkg_checker, npm_resolver) } Some(binary::NodeModules::Byonm { root_node_modules_dir, }) => { let root_node_modules_dir = root_node_modules_dir.map(|p| vfs.root().join(p)); - create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm( - CliByonmNpmResolverCreateOptions { + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm); + let npm_resolver = create_cli_npm_resolver( + CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir, - }, - )) - .await? + }), + ) + .await?; + (in_npm_pkg_checker, npm_resolver) } None => { - create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { - snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), - maybe_lockfile: None, - fs: fs.clone(), - http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, - cache_setting, - text_only_progress_bar: progress_bar, - maybe_node_modules_path: None, - npm_system_info: Default::default(), - npm_install_deps_provider: Arc::new( - // this is only used for installing packages, which isn't necessary with deno compile - NpmInstallDepsProvider::empty(), - ), - // Packages from different registries are already inlined in the binary, - // so no need to create actual `.npmrc` configuration. - npmrc: create_default_npmrc(), - lifecycle_scripts: Default::default(), - }, - )) - .await? + // Packages from different registries are already inlined in the binary, + // so no need to create actual `.npmrc` configuration. 
+ let npmrc = create_default_npmrc(); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: None, + }, + )); + let npm_resolver = + create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( + CliManagedNpmResolverCreateOptions { + snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), + maybe_lockfile: None, + fs: fs.clone(), + http_client_provider: http_client_provider.clone(), + npm_cache_dir, + cache_setting, + text_only_progress_bar: progress_bar, + maybe_node_modules_path: None, + npm_system_info: Default::default(), + npm_install_deps_provider: Arc::new( + // this is only used for installing packages, which isn't necessary with deno compile + NpmInstallDepsProvider::empty(), + ), + npmrc: create_default_npmrc(), + lifecycle_scripts: Default::default(), + }, + )) + .await?; + (in_npm_pkg_checker, npm_resolver) } }; let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let node_resolver = Arc::new(NodeResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), + )); + let cjs_tracker = Arc::new(CjsTracker::new( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + CjsTrackerOptions { + unstable_detect_cjs: metadata.unstable_config.detect_cjs, + }, )); - let cjs_resolutions = Arc::new(CjsResolutionStore::default()); let cache_db = Caches::new(deno_dir_provider.clone()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); let cli_node_resolver = Arc::new(CliNodeResolver::new( - cjs_resolutions.clone(), + cjs_tracker.clone(), fs.clone(), + in_npm_pkg_checker.clone(), node_resolver.clone(), npm_resolver.clone(), )); let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver.clone(), None, + false, ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker, node_resolver.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), )); let workspace_resolver = { let import_map = match metadata.workspace_resolver.import_map { @@ -562,15 +707,18 @@ pub async fn run(data: StandaloneData) -> Result { }; let module_loader_factory = StandaloneModuleLoaderFactory { shared: Arc::new(SharedModuleLoaderState { + cjs_tracker: cjs_tracker.clone(), + fs: fs.clone(), modules, - workspace_resolver, + node_code_translator: node_code_translator.clone(), node_resolver: cli_node_resolver.clone(), npm_module_loader: Arc::new(NpmModuleLoader::new( - cjs_resolutions.clone(), - node_code_translator, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver, + node_code_translator, )), + npm_resolver: npm_resolver.clone(), + workspace_resolver, }), }; @@ -609,7 +757,6 @@ pub async fn run(data: StandaloneData) -> Result { }); let worker_factory = CliMainWorkerFactory::new( Arc::new(BlobStore::default()), - cjs_resolutions, // Code cache is not supported for standalone binary yet. 
None, feature_checker, @@ -620,6 +767,7 @@ pub async fn run(data: StandaloneData) -> Result { Box::new(module_loader_factory), node_resolver, npm_resolver, + pkg_json_resolver, root_cert_store_provider, permissions, StorageKeyResolver::empty(), @@ -635,7 +783,6 @@ pub async fn run(data: StandaloneData) -> Result { inspect_wait: false, strace_ops: None, is_inspecting: false, - is_npm_main: main_module.scheme() == "npm", skip_op_registration: true, location: metadata.location, argv0: NpmPackageReqReference::from_specifier(&main_module) @@ -652,7 +799,6 @@ pub async fn run(data: StandaloneData) -> Result { node_ipc: None, serve_port: None, serve_host: None, - unstable_detect_cjs: metadata.unstable_config.detect_cjs, }, ); diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs index 7b63c584e7..a5eb649bfd 100644 --- a/cli/standalone/serialization.rs +++ b/cli/standalone/serialization.rs @@ -214,14 +214,13 @@ impl RemoteModulesStoreBuilder { } } -pub struct DenoCompileModuleData<'a> { - pub specifier: &'a Url, - pub media_type: MediaType, - pub data: Cow<'static, [u8]>, +pub enum DenoCompileModuleSource { + String(&'static str), + Bytes(Cow<'static, [u8]>), } -impl<'a> DenoCompileModuleData<'a> { - pub fn into_for_v8(self) -> (&'a Url, ModuleType, ModuleSourceCode) { +impl DenoCompileModuleSource { + pub fn into_for_v8(self) -> ModuleSourceCode { fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode { ModuleSourceCode::Bytes(match data { Cow::Borrowed(d) => d.into(), @@ -229,16 +228,31 @@ impl<'a> DenoCompileModuleData<'a> { }) } - fn into_string_unsafe(data: Cow<'static, [u8]>) -> ModuleSourceCode { + match self { // todo(https://github.com/denoland/deno_core/pull/943): store whether // the string is ascii or not ahead of time so we can avoid the is_ascii() // check in FastString::from_static + Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)), + Self::Bytes(b) => into_bytes(b), + } + } +} + +pub struct DenoCompileModuleData<'a> { + pub specifier: &'a Url, + pub media_type: MediaType, + pub data: Cow<'static, [u8]>, +} + +impl<'a> DenoCompileModuleData<'a> { + pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) { + fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource { match data { - Cow::Borrowed(d) => ModuleSourceCode::String( + Cow::Borrowed(d) => DenoCompileModuleSource::String( // SAFETY: we know this is a valid utf8 string - unsafe { FastString::from_static(std::str::from_utf8_unchecked(d)) }, + unsafe { std::str::from_utf8_unchecked(d) }, ), - Cow::Owned(d) => ModuleSourceCode::Bytes(d.into_boxed_slice().into()), + Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)), } } @@ -257,11 +271,14 @@ impl<'a> DenoCompileModuleData<'a> { (ModuleType::JavaScript, into_string_unsafe(self.data)) } MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)), - MediaType::Wasm => (ModuleType::Wasm, into_bytes(self.data)), - // just assume javascript if we made it here - MediaType::TsBuildInfo | MediaType::SourceMap | MediaType::Unknown => { - (ModuleType::JavaScript, into_bytes(self.data)) + MediaType::Wasm => { + (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data)) } + // just assume javascript if we made it here + MediaType::Css | MediaType::SourceMap | MediaType::Unknown => ( + ModuleType::JavaScript, + DenoCompileModuleSource::Bytes(self.data), + ), }; (self.specifier, media_type, source) } @@ -551,7 +568,7 @@ fn serialize_media_type(media_type: MediaType) -> u8 { 
MediaType::Tsx => 10, MediaType::Json => 11, MediaType::Wasm => 12, - MediaType::TsBuildInfo => 13, + MediaType::Css => 13, MediaType::SourceMap => 14, MediaType::Unknown => 15, } @@ -572,7 +589,7 @@ fn deserialize_media_type(value: u8) -> Result { 10 => Ok(MediaType::Tsx), 11 => Ok(MediaType::Json), 12 => Ok(MediaType::Wasm), - 13 => Ok(MediaType::TsBuildInfo), + 13 => Ok(MediaType::Css), 14 => Ok(MediaType::SourceMap), 15 => Ok(MediaType::Unknown), _ => bail!("Unknown media type value: {}", value), diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index be5d0ad0e1..272d063355 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -193,7 +193,7 @@ async fn bench_specifier_inner( .await?; // We execute the main module as a side module so that import.meta.main is not set. - worker.execute_side_module_possibly_with_npm().await?; + worker.execute_side_module().await?; let mut worker = worker.into_main_worker(); diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 7edb392d48..d880278884 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder; use crate::npm::CliNpmResolver; use crate::tsc; use crate::tsc::Diagnostics; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::extract; use crate::util::path::to_percent_decoded_str; @@ -99,6 +100,7 @@ pub struct CheckOptions { pub struct TypeChecker { caches: Arc, + cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, node_resolver: Arc, @@ -108,6 +110,7 @@ pub struct TypeChecker { impl TypeChecker { pub fn new( caches: Arc, + cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, node_resolver: Arc, @@ -115,6 +118,7 @@ impl TypeChecker { ) -> Self { Self { caches, + cjs_tracker, cli_options, module_graph_builder, node_resolver, @@ -244,6 +248,7 @@ impl TypeChecker { graph: graph.clone(), hash_data, maybe_npm: Some(tsc::RequestNpmState { + cjs_tracker: self.cjs_tracker.clone(), node_resolver: self.node_resolver.clone(), npm_resolver: self.npm_resolver.clone(), }), @@ -346,7 +351,7 @@ fn get_check_hash( } } MediaType::Json - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Wasm | MediaType::Unknown => continue, @@ -428,7 +433,7 @@ fn get_tsc_roots( } MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => None, }, @@ -536,7 +541,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool { | MediaType::Tsx | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => false, } diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 5a4a938bb9..b3e9993379 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -53,16 +53,6 @@ pub async fn compile( ); } - if cli_options.unstable_detect_cjs() { - log::warn!( - concat!( - "{} --unstable-detect-cjs is not properly supported in deno compile. 
", - "The compiled executable may encounter runtime errors.", - ), - crate::colors::yellow("Warning"), - ); - } - let output_path = resolve_compile_executable_output_path( http_client, &compile_flags, diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 48922f1449..f593332475 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -6,12 +6,12 @@ use crate::args::FileFlags; use crate::args::Flags; use crate::cdp; use crate::factory::CliFactory; -use crate::npm::CliNpmResolver; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_config::glob::FileCollector; use deno_config::glob::FilePatterns; @@ -25,6 +25,7 @@ use deno_core::serde_json; use deno_core::sourcemap::SourceMap; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use node_resolver::InNpmPackageChecker; use regex::Regex; use std::fs; use std::fs::File; @@ -461,7 +462,7 @@ fn filter_coverages( coverages: Vec, include: Vec, exclude: Vec, - npm_resolver: &dyn CliNpmResolver, + in_npm_pkg_checker: &dyn InNpmPackageChecker, ) -> Vec { let include: Vec = include.iter().map(|e| Regex::new(e).unwrap()).collect(); @@ -485,7 +486,7 @@ fn filter_coverages( || doc_test_re.is_match(e.url.as_str()) || Url::parse(&e.url) .ok() - .map(|url| npm_resolver.in_npm_package(&url)) + .map(|url| in_npm_pkg_checker.in_npm_package(&url)) .unwrap_or(false); let is_included = include.iter().any(|p| p.is_match(&e.url)); @@ -496,7 +497,7 @@ fn filter_coverages( .collect::>() } -pub async fn cover_files( +pub fn cover_files( flags: Arc, coverage_flags: CoverageFlags, ) -> Result<(), AnyError> { @@ -506,9 +507,10 @@ pub async fn cover_files( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; - let npm_resolver = factory.npm_resolver().await?; + let in_npm_pkg_checker = factory.in_npm_pkg_checker()?; let file_fetcher = factory.file_fetcher()?; let emitter = factory.emitter()?; + let cjs_tracker = factory.cjs_tracker()?; assert!(!coverage_flags.files.include.is_empty()); @@ -528,7 +530,7 @@ pub async fn cover_files( script_coverages, coverage_flags.include, coverage_flags.exclude, - npm_resolver.as_ref(), + in_npm_pkg_checker.as_ref(), ); if script_coverages.is_empty() { return Err(generic_error("No covered files included in the report")); @@ -585,6 +587,8 @@ pub async fn cover_files( let transpiled_code = match file.media_type { MediaType::JavaScript | MediaType::Unknown + | MediaType::Css + | MediaType::Wasm | MediaType::Cjs | MediaType::Mjs | MediaType::Json => None, @@ -594,7 +598,10 @@ pub async fn cover_files( | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) { + let module_kind = ModuleKind::from_is_cjs( + cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?, + ); + Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) { Some(code) => code, None => { return Err(anyhow!( @@ -605,7 +612,7 @@ pub async fn cover_files( } }) } - MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => { + MediaType::SourceMap => { unreachable!() } }; diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 5e18546a28..e33da4efb2 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -22,9 +22,9 @@ use deno_core::serde_json; use deno_doc as doc; use deno_doc::html::UrlResolveKind; use 
deno_graph::source::NullFileSystem; +use deno_graph::EsParser; use deno_graph::GraphKind; use deno_graph::ModuleAnalyzer; -use deno_graph::ModuleParser; use deno_graph::ModuleSpecifier; use doc::html::ShortPath; use doc::DocDiagnostic; @@ -37,7 +37,7 @@ const JSON_SCHEMA_VERSION: u8 = 1; async fn generate_doc_nodes_for_builtin_types( doc_flags: DocFlags, - parser: &dyn ModuleParser, + parser: &dyn EsParser, analyzer: &dyn ModuleAnalyzer, ) -> Result>, AnyError> { let source_file_specifier = @@ -96,7 +96,7 @@ pub async fn doc( let module_info_cache = factory.module_info_cache()?; let parsed_source_cache = factory.parsed_source_cache(); let capturing_parser = parsed_source_cache.as_capturing_parser(); - let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache); + let analyzer = module_info_cache.as_module_analyzer(); let doc_nodes_by_url = match doc_flags.source_files { DocSourceFileFlag::Builtin => { diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs index aca125e00b..6d1801ce69 100644 --- a/cli/tools/registry/tar.rs +++ b/cli/tools/registry/tar.rs @@ -120,7 +120,7 @@ fn resolve_content_maybe_unfurling( | MediaType::Unknown | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo => { + | MediaType::Css => { // not unfurlable data return Ok(data); } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 484664dae4..23b0f11ac5 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -25,6 +25,7 @@ use deno_ast::swc::visit::noop_visit_type; use deno_ast::swc::visit::Visit; use deno_ast::swc::visit::VisitWith; use deno_ast::ImportsNotUsedAsValues; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_ast::ParseDiagnosticsError; use deno_ast::ParsedSource; @@ -641,6 +642,10 @@ impl ReplSession { jsx_fragment_factory: self.jsx.frag_factory.clone(), jsx_import_source: self.jsx.import_source.clone(), var_decl_imports: true, + verbatim_module_syntax: false, + }, + &deno_ast::TranspileModuleOptions { + module_kind: Some(ModuleKind::Esm), }, &deno_ast::EmitOptions { source_map: deno_ast::SourceMapOption::None, @@ -651,7 +656,6 @@ impl ReplSession { }, )? .into_source() - .into_string()? .text; let value = self diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs index 6ccf8e344b..6cebedd012 100644 --- a/cli/tools/run/hmr.rs +++ b/cli/tools/run/hmr.rs @@ -1,9 +1,11 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
-use crate::cdp; -use crate::emit::Emitter; -use crate::util::file_watcher::WatcherCommunicator; -use crate::util::file_watcher::WatcherRestartMode; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; @@ -12,11 +14,14 @@ use deno_core::serde_json::{self}; use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_terminal::colors; -use std::collections::HashMap; -use std::path::PathBuf; -use std::sync::Arc; use tokio::select; +use crate::cdp; +use crate::emit::Emitter; +use crate::resolver::CjsTracker; +use crate::util::file_watcher::WatcherCommunicator; +use crate::util::file_watcher::WatcherRestartMode; + fn explain(status: &cdp::Status) -> &'static str { match status { cdp::Status::Ok => "OK", @@ -58,6 +63,7 @@ pub struct HmrRunner { session: LocalInspectorSession, watcher_communicator: Arc, script_ids: HashMap, + cjs_tracker: Arc, emitter: Arc, } @@ -139,7 +145,8 @@ impl crate::worker::HmrRunner for HmrRunner { }; let source_code = self.emitter.load_and_emit_for_hmr( - &module_url + &module_url, + ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?), ).await?; let mut tries = 1; @@ -172,12 +179,14 @@ impl crate::worker::HmrRunner for HmrRunner { impl HmrRunner { pub fn new( + cjs_tracker: Arc, emitter: Arc, session: LocalInspectorSession, watcher_communicator: Arc, ) -> Self { Self { session, + cjs_tracker, emitter, watcher_communicator, script_ids: HashMap::new(), diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index e81abad0b2..fa849614fa 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -631,7 +631,7 @@ async fn configure_main_worker( "Deno[Deno.internal].core.setLeakTracingEnabled(true);", )?; } - let res = worker.execute_side_module_possibly_with_npm().await; + let res = worker.execute_side_module().await; let mut worker = worker.into_main_worker(); match res { Ok(()) => Ok(()), diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 6011dece76..bdc4340e33 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -845,6 +845,8 @@ delete Object.prototype.__proto__; jqueryMessage, "Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_save_dev_types_Slash_2592": jqueryMessage, + "Module_0_was_resolved_to_1_but_allowArbitraryExtensions_is_not_set_6263": + "Module '{0}' was resolved to '{1}', but importing these modules is not supported.", }; })()); diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index b0394ec177..d3795706eb 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -323,7 +323,7 @@ impl Diagnostics { // todo(dsherret): use a short lived cache to prevent parsing // source maps so often if let Ok(source_map) = - SourceMap::from_slice(&fast_check_module.source_map) + SourceMap::from_slice(fast_check_module.source_map.as_bytes()) { if let Some(start) = d.start.as_mut() { let maybe_token = source_map diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index de91889b62..dc7fc38f7a 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -3,9 +3,11 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; +use crate::cache::ModuleInfoCache; use crate::node; use crate::npm::CliNpmResolver; use crate::npm::ResolvePkgFolderFromDenoReqError; +use crate::resolver::CjsTracker; use 
crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; @@ -32,13 +34,13 @@ use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; +use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCoded; -use node_resolver::errors::ResolvePkgSubpathFromDenoModuleError; +use node_resolver::errors::PackageSubpathResolveError; use node_resolver::NodeModuleKind; -use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -302,8 +304,76 @@ pub struct EmittedFile { pub media_type: MediaType, } +pub fn into_specifier_and_media_type( + specifier: Option, +) -> (ModuleSpecifier, MediaType) { + match specifier { + Some(specifier) => { + let media_type = MediaType::from_specifier(&specifier); + + (specifier, media_type) + } + None => ( + Url::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } +} + +#[derive(Debug)] +pub struct TypeCheckingCjsTracker { + cjs_tracker: Arc, + module_info_cache: Arc, +} + +impl TypeCheckingCjsTracker { + pub fn new( + cjs_tracker: Arc, + module_info_cache: Arc, + ) -> Self { + Self { + cjs_tracker, + module_info_cache, + } + } + + pub fn is_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + code: &Arc, + ) -> bool { + if let Some(module_kind) = + self.cjs_tracker.get_known_kind(specifier, media_type) + { + module_kind.is_cjs() + } else { + let maybe_is_script = self + .module_info_cache + .as_module_analyzer() + .analyze_sync(specifier, media_type, code) + .ok() + .map(|info| info.is_script); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + } +} + #[derive(Debug)] pub struct RequestNpmState { + pub cjs_tracker: Arc, pub node_resolver: Arc, pub npm_resolver: Arc, } @@ -456,7 +526,7 @@ pub fn as_ts_script_kind(media_type: MediaType) -> i32 { MediaType::Tsx => 4, MediaType::Json => 6, MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Wasm | MediaType::Unknown => 0, } @@ -489,25 +559,22 @@ fn op_load_inner( ) -> Result, AnyError> { fn load_from_node_modules( specifier: &ModuleSpecifier, - node_resolver: Option<&NodeResolver>, + npm_state: Option<&RequestNpmState>, media_type: &mut MediaType, is_cjs: &mut bool, ) -> Result { *media_type = MediaType::from_specifier(specifier); - *is_cjs = node_resolver - .map(|node_resolver| { - match node_resolver.url_to_node_resolution(specifier.clone()) { - Ok(NodeResolution::CommonJs(_)) => true, - Ok(NodeResolution::Esm(_)) - | Ok(NodeResolution::BuiltIn(_)) - | Err(_) => false, - } - }) - .unwrap_or(false); let file_path = specifier.to_file_path().unwrap(); let code = std::fs::read_to_string(&file_path) .with_context(|| format!("Unable to load {}", file_path.display()))?; - Ok(code) + let code: Arc = code.into(); + *is_cjs = npm_state + .map(|npm_state| { + npm_state.cjs_tracker.is_cjs(specifier, *media_type, &code) + }) + .unwrap_or(false); + // todo(dsherret): how to avoid cloning here? 
+ Ok(code.to_string()) } let state = state.borrow_mut::(); @@ -560,6 +627,9 @@ fn op_load_inner( match module { Module::Js(module) => { media_type = module.media_type; + if matches!(media_type, MediaType::Cjs | MediaType::Cts) { + is_cjs = true; + } let source = module .fast_check_module() .map(|m| &*m.source) @@ -573,11 +643,13 @@ fn op_load_inner( Module::Npm(_) | Module::Node(_) => None, Module::External(module) => { // means it's Deno code importing an npm module - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); Some(Cow::Owned(load_from_node_modules( &specifier, - state.maybe_npm.as_ref().map(|n| n.node_resolver.as_ref()), + state.maybe_npm.as_ref(), &mut media_type, &mut is_cjs, )?)) @@ -590,7 +662,7 @@ fn op_load_inner( { Some(Cow::Owned(load_from_node_modules( specifier, - Some(npm.node_resolver.as_ref()), + Some(npm), &mut media_type, &mut is_cjs, )?)) @@ -739,7 +811,13 @@ fn op_resolve_inner( } } }; - (specifier_str, media_type.as_ts_extension()) + ( + specifier_str, + match media_type { + MediaType::Css => ".js", // surface these as .js for typescript + media_type => media_type.as_ts_extension(), + }, + ) } None => ( MISSING_DEPENDENCY_SPECIFIER.to_string(), @@ -810,29 +888,27 @@ fn resolve_graph_specifier_types( Some(referrer), NodeResolutionMode::Types, ); - let maybe_resolution = match res_result { - Ok(res) => Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } } Some(Module::External(module)) => { // we currently only use "External" for when the module is in an npm package - Ok(state.maybe_npm.as_ref().map(|npm| { - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); - NodeResolution::into_specifier_and_media_type( - npm.node_resolver.url_to_node_resolution(specifier).ok(), - ) + Ok(state.maybe_npm.as_ref().map(|_| { + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); + into_specifier_and_media_type(Some(specifier)) })) } Some(Module::Node(_)) | None => Ok(None), @@ -844,7 +920,7 @@ enum ResolveNonGraphSpecifierTypesError { #[error(transparent)] ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), #[error(transparent)] - ResolvePkgSubpathFromDenoModule(#[from] ResolvePkgSubpathFromDenoModuleError), + PackageSubpathResolve(#[from] PackageSubpathResolveError), } fn resolve_non_graph_specifier_types( @@ -863,7 +939,7 @@ fn resolve_non_graph_specifier_types( let node_resolver = &npm.node_resolver; if node_resolver.in_npm_package(referrer) { // we're in an npm package, so use node resolution - Ok(Some(NodeResolution::into_specifier_and_media_type( + Ok(Some(into_specifier_and_media_type( node_resolver .resolve( raw_specifier, @@ -871,7 +947,8 @@ fn resolve_non_graph_specifier_types( referrer_kind, NodeResolutionMode::Types, ) - .ok(), + .ok() + .map(|res| res.into_url()), ))) } else if let Ok(npm_req_ref) = NpmPackageReqReference::from_str(raw_specifier) @@ -890,17 +967,15 @@ fn resolve_non_graph_specifier_types( Some(referrer), NodeResolutionMode::Types, ); - let maybe_resolution = match res_result { - Ok(res) => 
Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } diff --git a/cli/util/extract.rs b/cli/util/extract.rs index 873b7e7f2d..f577cbefec 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -64,7 +64,7 @@ fn extract_inner( }) { Ok(parsed) => { let mut c = ExportCollector::default(); - c.visit_program(parsed.program_ref()); + c.visit_program(parsed.program().as_ref()); c } Err(_) => ExportCollector::default(), @@ -570,14 +570,14 @@ fn generate_pseudo_file( })?; let top_level_atoms = swc_utils::collect_decls_with_ctxt::( - parsed.program_ref(), + &parsed.program_ref(), parsed.top_level_context(), ); let transformed = parsed .program_ref() - .clone() + .to_owned() .fold_with(&mut as_folder(Transform { specifier: &file.specifier, base_file_specifier, @@ -1416,7 +1416,7 @@ console.log(Foo); }) .unwrap(); - collector.visit_program(parsed.program_ref()); + parsed.program_ref().visit_with(&mut collector); collector } diff --git a/cli/util/logger.rs b/cli/util/logger.rs index cdc89411fe..d93753dfd3 100644 --- a/cli/util/logger.rs +++ b/cli/util/logger.rs @@ -65,6 +65,8 @@ pub fn init(maybe_level: Option) { .filter_module("swc_ecma_parser", log::LevelFilter::Error) // Suppress span lifecycle logs since they are too verbose .filter_module("tracing::span", log::LevelFilter::Off) + // for deno_compile, this is too verbose + .filter_module("editpe", log::LevelFilter::Error) .format(|buf, record| { let mut target = record.target().to_string(); if let Some(line_no) = record.line() { diff --git a/cli/util/path.rs b/cli/util/path.rs index e4ae6e7cb1..58bed664f9 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -42,21 +42,6 @@ pub fn get_extension(file_path: &Path) -> Option { .map(|e| e.to_lowercase()); } -pub fn specifier_has_extension( - specifier: &ModuleSpecifier, - searching_ext: &str, -) -> bool { - let Some((_, ext)) = specifier.path().rsplit_once('.') else { - return false; - }; - let searching_ext = searching_ext.strip_prefix('.').unwrap_or(searching_ext); - debug_assert!(!searching_ext.contains('.')); // exts like .d.ts are not implemented here - if ext.len() != searching_ext.len() { - return false; - } - ext.eq_ignore_ascii_case(searching_ext) -} - pub fn get_atomic_dir_path(file_path: &Path) -> PathBuf { let rand = gen_rand_path_component(); let new_file_name = format!( @@ -350,18 +335,6 @@ mod test { } } - #[test] - fn test_specifier_has_extension() { - fn get(specifier: &str, ext: &str) -> bool { - specifier_has_extension(&ModuleSpecifier::parse(specifier).unwrap(), ext) - } - - assert!(get("file:///a/b/c.ts", "ts")); - assert!(get("file:///a/b/c.ts", ".ts")); - assert!(!get("file:///a/b/c.ts", ".cts")); - assert!(get("file:///a/b/c.CtS", ".cts")); - } - #[test] fn test_to_percent_decoded_str() { let str = to_percent_decoded_str("%F0%9F%A6%95"); diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs index df72cc2be6..8524e63ebb 100644 --- a/cli/util/text_encoding.rs +++ b/cli/util/text_encoding.rs @@ -97,6 +97,7 @@ fn find_source_map_range(code: &[u8]) -> Option> { } /// Converts an `Arc` to an `Arc<[u8]>`. 
+#[allow(dead_code)] pub fn arc_str_to_bytes(arc_str: Arc) -> Arc<[u8]> { let raw = Arc::into_raw(arc_str); // SAFETY: This is safe because they have the same memory layout. diff --git a/cli/worker.rs b/cli/worker.rs index e230197d2b..baacd681a1 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -14,16 +14,17 @@ use deno_core::v8; use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::FeatureChecker; -use deno_core::ModuleId; use deno_core::ModuleLoader; use deno_core::PollEventLoopOptions; use deno_core::SharedArrayBufferStore; use deno_runtime::code_cache; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_fs; -use deno_runtime::deno_node; use deno_runtime::deno_node::NodeExtInitServices; +use deno_runtime::deno_node::NodeRequireLoader; +use deno_runtime::deno_node::NodeRequireLoaderRc; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; @@ -42,7 +43,6 @@ use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; use deno_terminal::colors; -use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; use tokio::select; @@ -51,28 +51,27 @@ use crate::args::DenoSubcommand; use crate::args::StorageKeyResolver; use crate::errors; use crate::npm::CliNpmResolver; -use crate::resolver::CjsResolutionStore; use crate::util::checksum; use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherRestartMode; -use crate::util::path::specifier_has_extension; use crate::version; -pub struct ModuleLoaderAndSourceMapGetter { +pub struct CreateModuleLoaderResult { pub module_loader: Rc, + pub node_require_loader: Rc, } pub trait ModuleLoaderFactory: Send + Sync { fn create_for_main( &self, root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter; + ) -> CreateModuleLoaderResult; fn create_for_worker( &self, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter; + ) -> CreateModuleLoaderResult; } #[async_trait::async_trait(?Send)] @@ -109,7 +108,6 @@ pub struct CliMainWorkerOptions { pub inspect_wait: bool, pub strace_ops: Option>, pub is_inspecting: bool, - pub is_npm_main: bool, pub location: Option, pub argv0: Option, pub node_debug: Option, @@ -122,13 +120,11 @@ pub struct CliMainWorkerOptions { pub node_ipc: Option, pub serve_port: Option, pub serve_host: Option, - pub unstable_detect_cjs: bool, } struct SharedWorkerState { blob_store: Arc, broadcast_channel: InMemoryBroadcastChannel, - cjs_resolution_store: Arc, code_cache: Option>, compiled_wasm_module_store: CompiledWasmModuleStore, feature_checker: Arc, @@ -139,6 +135,7 @@ struct SharedWorkerState { module_loader_factory: Box, node_resolver: Arc, npm_resolver: Arc, + pkg_json_resolver: Arc, root_cert_store_provider: Arc, root_permissions: PermissionsContainer, shared_array_buffer_store: SharedArrayBufferStore, @@ -148,11 +145,15 @@ struct SharedWorkerState { } impl SharedWorkerState { - pub fn create_node_init_services(&self) -> NodeExtInitServices { + pub fn create_node_init_services( + &self, + node_require_loader: NodeRequireLoaderRc, + ) -> NodeExtInitServices { NodeExtInitServices { - node_require_resolver: self.npm_resolver.clone().into_require_resolver(), + node_require_loader, node_resolver: 
self.node_resolver.clone(), npm_resolver: self.npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver: self.pkg_json_resolver.clone(), } } @@ -163,7 +164,6 @@ impl SharedWorkerState { pub struct CliMainWorker { main_module: ModuleSpecifier, - is_main_cjs: bool, worker: MainWorker, shared: Arc, } @@ -185,17 +185,7 @@ impl CliMainWorker { log::debug!("main_module {}", self.main_module); - if self.is_main_cjs { - deno_node::load_cjs_module( - &mut self.worker.js_runtime, - &self.main_module.to_file_path().unwrap().to_string_lossy(), - true, - self.shared.options.inspect_brk, - )?; - } else { - self.execute_main_module_possibly_with_npm().await?; - } - + self.execute_main_module().await?; self.worker.dispatch_load_event()?; loop { @@ -283,22 +273,7 @@ impl CliMainWorker { /// Execute the given main module emitting load and unload events before and after execution /// respectively. pub async fn execute(&mut self) -> Result<(), AnyError> { - if self.inner.is_main_cjs { - deno_node::load_cjs_module( - &mut self.inner.worker.js_runtime, - &self - .inner - .main_module - .to_file_path() - .unwrap() - .to_string_lossy(), - true, - self.inner.shared.options.inspect_brk, - )?; - } else { - self.inner.execute_main_module_possibly_with_npm().await?; - } - + self.inner.execute_main_module().await?; self.inner.worker.dispatch_load_event()?; self.pending_unload = true; @@ -339,24 +314,13 @@ impl CliMainWorker { executor.execute().await } - pub async fn execute_main_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_main_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_main_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await + self.worker.evaluate_module(id).await } - pub async fn execute_side_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_side_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_side_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await - } - - async fn evaluate_module_possibly_with_npm( - &mut self, - id: ModuleId, - ) -> Result<(), AnyError> { self.worker.evaluate_module(id).await } @@ -426,7 +390,6 @@ impl CliMainWorkerFactory { #[allow(clippy::too_many_arguments)] pub fn new( blob_store: Arc, - cjs_resolution_store: Arc, code_cache: Option>, feature_checker: Arc, fs: Arc, @@ -436,6 +399,7 @@ impl CliMainWorkerFactory { module_loader_factory: Box, node_resolver: Arc, npm_resolver: Arc, + pkg_json_resolver: Arc, root_cert_store_provider: Arc, root_permissions: PermissionsContainer, storage_key_resolver: StorageKeyResolver, @@ -446,7 +410,6 @@ impl CliMainWorkerFactory { shared: Arc::new(SharedWorkerState { blob_store, broadcast_channel: Default::default(), - cjs_resolution_store, code_cache, compiled_wasm_module_store: Default::default(), feature_checker, @@ -457,6 +420,7 @@ impl CliMainWorkerFactory { module_loader_factory, node_resolver, npm_resolver, + pkg_json_resolver, root_cert_store_provider, root_permissions, shared_array_buffer_store: Default::default(), @@ -492,10 +456,13 @@ impl CliMainWorkerFactory { stdio: deno_runtime::deno_io::Stdio, ) -> Result { let shared = &self.shared; - let ModuleLoaderAndSourceMapGetter { module_loader } = shared + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = shared .module_loader_factory .create_for_main(permissions.clone()); - let (main_module, is_main_cjs) = if let Ok(package_ref) = + let main_module = if let 
Ok(package_ref) = NpmPackageReqReference::from_specifier(&main_module) { if let Some(npm_resolver) = shared.npm_resolver.as_managed() { @@ -515,9 +482,8 @@ impl CliMainWorkerFactory { package_ref.req(), &referrer, )?; - let node_resolution = self + let main_module = self .resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); if let Some(lockfile) = &shared.maybe_lockfile { // For npm binary commands, ensure that the lockfile gets updated @@ -526,36 +492,9 @@ impl CliMainWorkerFactory { lockfile.write_if_changed()?; } - (node_resolution.into_url(), is_main_cjs) - } else if shared.options.is_npm_main - || shared.node_resolver.in_npm_package(&main_module) - { - let node_resolution = - shared.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) + main_module } else { - let is_maybe_cjs_js_ext = self.shared.options.unstable_detect_cjs - && specifier_has_extension(&main_module, "js") - && self - .shared - .node_resolver - .get_closest_package_json(&main_module) - .ok() - .flatten() - .map(|pkg_json| pkg_json.typ == "commonjs") - .unwrap_or(false); - let is_cjs = if is_maybe_cjs_js_ext { - // fill the cjs resolution store by preparing the module load - module_loader - .prepare_load(&main_module, None, false) - .await?; - self.shared.cjs_resolution_store.is_known_cjs(&main_module) - } else { - main_module.scheme() == "file" - && specifier_has_extension(&main_module, "cjs") - }; - (main_module, is_cjs) + main_module }; let maybe_inspector_server = shared.maybe_inspector_server.clone(); @@ -597,7 +536,9 @@ impl CliMainWorkerFactory { root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), npm_process_state_provider: Some(shared.npm_process_state_provider()), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), @@ -682,7 +623,6 @@ impl CliMainWorkerFactory { Ok(CliMainWorker { main_module, - is_main_cjs, worker, shared: shared.clone(), }) @@ -692,19 +632,19 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result { + ) -> Result { match self .shared .node_resolver .resolve_binary_export(package_folder, sub_path) { - Ok(node_resolution) => Ok(node_resolution), + Ok(specifier) => Ok(specifier), Err(original_err) => { // if the binary entrypoint was not found, fallback to regular node resolution let result = self.resolve_binary_entrypoint_fallback(package_folder, sub_path); match result { - Ok(Some(resolution)) => Ok(resolution), + Ok(Some(specifier)) => Ok(specifier), Ok(None) => Err(original_err.into()), Err(fallback_err) => { bail!("{:#}\n\nFallback failed: {:#}", original_err, fallback_err) @@ -719,7 +659,7 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result, AnyError> { + ) -> Result, AnyError> { // only fallback if the user specified a sub path if sub_path.is_none() { // it's confusing to users if the package doesn't have any binary @@ -728,7 +668,7 @@ impl CliMainWorkerFactory { return Ok(None); } - let resolution = self + let specifier = self .shared .node_resolver .resolve_package_subpath_from_deno_module( @@ -737,19 +677,14 @@ impl CliMainWorkerFactory { /* referrer 
*/ None, NodeResolutionMode::Execution, )?; - match &resolution { - NodeResolution::BuiltIn(_) => Ok(None), - NodeResolution::CommonJs(specifier) | NodeResolution::Esm(specifier) => { - if specifier - .to_file_path() - .map(|p| p.exists()) - .unwrap_or(false) - { - Ok(Some(resolution)) - } else { - bail!("Cannot find module '{}'", specifier) - } - } + if specifier + .to_file_path() + .map(|p| p.exists()) + .unwrap_or(false) + { + Ok(Some(specifier)) + } else { + bail!("Cannot find module '{}'", specifier) } } } @@ -761,11 +696,13 @@ fn create_web_worker_callback( Arc::new(move |args| { let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let ModuleLoaderAndSourceMapGetter { module_loader } = - shared.module_loader_factory.create_for_worker( - args.parent_permissions.clone(), - args.permissions.clone(), - ); + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = shared.module_loader_factory.create_for_worker( + args.parent_permissions.clone(), + args.permissions.clone(), + ); let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); @@ -795,7 +732,9 @@ fn create_web_worker_callback( root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), diff --git a/ext/node/lib.rs b/ext/node/lib.rs index b34bea815b..db6d08e11e 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -9,15 +9,11 @@ use std::path::Path; use std::path::PathBuf; use deno_core::error::AnyError; -use deno_core::located_script_name; use deno_core::op2; use deno_core::url::Url; #[allow(unused_imports)] use deno_core::v8; use deno_core::v8::ExternalReference; -use deno_core::JsRuntime; -use deno_fs::sync::MaybeSend; -use deno_fs::sync::MaybeSync; use node_resolver::NpmResolverRc; use once_cell::sync::Lazy; @@ -125,16 +121,17 @@ impl NodePermissions for deno_permissions::PermissionsContainer { } #[allow(clippy::disallowed_types)] -pub type NodeRequireResolverRc = - deno_fs::sync::MaybeArc; +pub type NodeRequireLoaderRc = std::rc::Rc; -pub trait NodeRequireResolver: std::fmt::Debug + MaybeSend + MaybeSync { +pub trait NodeRequireLoader { #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a>( &self, permissions: &mut dyn NodePermissions, path: &'a Path, ) -> Result, AnyError>; + + fn load_text_file_lossy(&self, path: &Path) -> Result; } pub static NODE_ENV_VAR_ALLOWLIST: Lazy> = Lazy::new(|| { @@ -152,10 +149,12 @@ fn op_node_build_os() -> String { env!("TARGET").split('-').nth(2).unwrap().to_string() } +#[derive(Clone)] pub struct NodeExtInitServices { - pub node_require_resolver: NodeRequireResolverRc, + pub node_require_loader: NodeRequireLoaderRc, pub node_resolver: NodeResolverRc, pub npm_resolver: NpmResolverRc, + pub pkg_json_resolver: PackageJsonResolverRc, } deno_core::extension!(deno_node, @@ -639,9 +638,10 @@ deno_core::extension!(deno_node, state.put(options.fs.clone()); if let Some(init) = &options.maybe_init { - state.put(init.node_require_resolver.clone()); + state.put(init.node_require_loader.clone()); state.put(init.node_resolver.clone()); state.put(init.npm_resolver.clone()); + state.put(init.pkg_json_resolver.clone()); } }, 
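For orientation, the new `NodeRequireLoader` trait only asks implementers for a read-permission check and a lossy text read (the generic return types below are reconstructed from the surrounding `where P: NodePermissions` usage). A minimal sketch of an implementation, where the `FsRequireLoader` name and the pass-through permission check are illustrative assumptions rather than anything from this change:

use std::borrow::Cow;
use std::path::Path;

use deno_core::error::AnyError;

// Hypothetical implementer, for illustration only; the real ones live in the CLI crate.
#[derive(Debug)]
struct FsRequireLoader;

impl NodeRequireLoader for FsRequireLoader {
  fn ensure_read_permission<'a>(
    &self,
    permissions: &mut dyn NodePermissions,
    path: &'a Path,
  ) -> Result<Cow<'a, Path>, AnyError> {
    // Illustration only: a real loader must actually run the read check
    // against `permissions` (and may choose to skip it inside node_modules).
    let _ = permissions;
    Ok(Cow::Borrowed(path))
  }

  fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> {
    // Lossy read so invalid UTF-8 in a dependency does not abort module loading.
    let bytes = std::fs::read(path)?;
    Ok(String::from_utf8_lossy(&bytes).into_owned())
  }
}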
global_template_middleware = global_template_middleware, @@ -761,33 +761,16 @@ deno_core::extension!(deno_node, }, ); -pub fn load_cjs_module( - js_runtime: &mut JsRuntime, - module: &str, - main: bool, - inspect_brk: bool, -) -> Result<(), AnyError> { - fn escape_for_single_quote_string(text: &str) -> String { - text.replace('\\', r"\\").replace('\'', r"\'") - } - - let source_code = format!( - r#"(function loadCjsModule(moduleName, isMain, inspectBrk) {{ - Deno[Deno.internal].node.loadCjsModule(moduleName, isMain, inspectBrk); - }})('{module}', {main}, {inspect_brk});"#, - main = main, - module = escape_for_single_quote_string(module), - inspect_brk = inspect_brk, - ); - - js_runtime.execute_script(located_script_name!(), source_code)?; - Ok(()) -} - pub type NodeResolver = node_resolver::NodeResolver; #[allow(clippy::disallowed_types)] pub type NodeResolverRc = deno_fs::sync::MaybeArc>; +pub type PackageJsonResolver = + node_resolver::PackageJsonResolver; +#[allow(clippy::disallowed_types)] +pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc< + node_resolver::PackageJsonResolver, +>; #[derive(Debug)] pub struct DenoFsNodeResolverEnv { diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 7524fb43c7..30db8b6293 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -9,6 +9,7 @@ use deno_core::OpState; use deno_fs::FileSystemRc; use deno_package_json::PackageJsonRc; use deno_path_util::normalize_path; +use deno_path_util::url_to_file_path; use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use node_resolver::REQUIRE_CONDITIONS; @@ -19,9 +20,10 @@ use std::path::PathBuf; use std::rc::Rc; use crate::NodePermissions; -use crate::NodeRequireResolverRc; +use crate::NodeRequireLoaderRc; use crate::NodeResolverRc; use crate::NpmResolverRc; +use crate::PackageJsonResolverRc; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a, P>( @@ -31,9 +33,9 @@ fn ensure_read_permission<'a, P>( where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let loader = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - resolver.ensure_read_permission(permissions, file_path) + loader.ensure_read_permission(permissions, file_path) } #[derive(Debug, thiserror::Error)] @@ -54,10 +56,14 @@ pub enum RequireError { PackageImportsResolve( #[from] node_resolver::errors::PackageImportsResolveError, ), - #[error("failed to convert '{0}' to file path")] - FilePathConversion(Url), + #[error(transparent)] + FilePathConversion(#[from] deno_path_util::UrlToFilePathError), + #[error(transparent)] + UrlConversion(#[from] deno_path_util::PathToUrlError), #[error(transparent)] Fs(#[from] deno_io::fs::FsError), + #[error(transparent)] + ReadModule(deno_core::error::AnyError), #[error("Unable to get CWD: {0}")] UnableToGetCwd(deno_io::fs::FsError), } @@ -229,8 +235,11 @@ pub fn op_require_is_deno_dir_package( state: &mut OpState, #[string] path: String, ) -> bool { - let resolver = state.borrow::(); - resolver.in_npm_package_at_file_path(&PathBuf::from(path)) + let resolver = state.borrow::(); + match deno_path_util::url_from_file_path(&PathBuf::from(path)) { + Ok(specifier) => resolver.in_npm_package(&specifier), + Err(_) => false, + } } #[op2] @@ -411,8 +420,8 @@ where return Ok(None); } - let node_resolver = state.borrow::(); - let pkg = node_resolver + let pkg_json_resolver = state.borrow::(); + let pkg = pkg_json_resolver .get_closest_package_json_from_path(&PathBuf::from(parent_path.unwrap())) .ok() .flatten(); @@ -441,6 +450,7 @@ where let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap(); if let Some(exports) = &pkg.exports { + let node_resolver = state.borrow::(); let r = node_resolver.package_exports_resolve( &pkg.path, &expansion, @@ -470,10 +480,13 @@ where P: NodePermissions + 'static, { let file_path = PathBuf::from(file_path); + // todo(dsherret): there's multiple borrows to NodeRequireLoaderRc here let file_path = ensure_read_permission::
<P>
(state, &file_path) .map_err(RequireError::Permission)?; - let fs = state.borrow::(); - Ok(fs.read_text_file_lossy_sync(&file_path, None)?) + let loader = state.borrow::(); + loader + .load_text_file_lossy(&file_path) + .map_err(RequireError::ReadModule) } #[op2] @@ -503,11 +516,12 @@ where P: NodePermissions + 'static, { let fs = state.borrow::(); - let npm_resolver = state.borrow::(); let node_resolver = state.borrow::(); + let pkg_json_resolver = state.borrow::(); let modules_path = PathBuf::from(&modules_path_str); - let pkg_path = if npm_resolver.in_npm_package_at_file_path(&modules_path) + let modules_specifier = deno_path_util::url_from_file_path(&modules_path)?; + let pkg_path = if node_resolver.in_npm_package(&modules_specifier) && !uses_local_node_modules_dir { modules_path @@ -521,7 +535,7 @@ where } }; let Some(pkg) = - node_resolver.load_package_json(&pkg_path.join("package.json"))? + pkg_json_resolver.load_package_json(&pkg_path.join("package.json"))? else { return Ok(None); }; @@ -561,8 +575,8 @@ where { let filename = PathBuf::from(filename); // permissions: allow reading the closest package.json files - let node_resolver = state.borrow::().clone(); - node_resolver.get_closest_package_json_from_path(&filename) + let pkg_json_resolver = state.borrow::(); + pkg_json_resolver.get_closest_package_json_from_path(&filename) } #[op2] @@ -574,13 +588,13 @@ pub fn op_require_read_package_scope
<P>
( where P: NodePermissions + 'static, { - let node_resolver = state.borrow::().clone(); + let pkg_json_resolver = state.borrow::(); let package_json_path = PathBuf::from(package_json_path); if package_json_path.file_name() != Some("package.json".as_ref()) { // permissions: do not allow reading a non-package.json file return None; } - node_resolver + pkg_json_resolver .load_package_json(&package_json_path) .ok() .flatten() @@ -599,14 +613,15 @@ where let referrer_path = PathBuf::from(&referrer_filename); let referrer_path = ensure_read_permission::
<P>
(state, &referrer_path) .map_err(RequireError::Permission)?; - let node_resolver = state.borrow::(); + let pkg_json_resolver = state.borrow::(); let Some(pkg) = - node_resolver.get_closest_package_json_from_path(&referrer_path)? + pkg_json_resolver.get_closest_package_json_from_path(&referrer_path)? else { return Ok(None); }; if pkg.imports.is_some() { + let node_resolver = state.borrow::(); let referrer_url = Url::from_file_path(&referrer_filename).unwrap(); let url = node_resolver.package_imports_resolve( &request, @@ -637,13 +652,6 @@ fn url_to_file_path_string(url: &Url) -> Result { Ok(file_path.to_string_lossy().into_owned()) } -fn url_to_file_path(url: &Url) -> Result { - match url.to_file_path() { - Ok(file_path) => Ok(file_path), - Err(()) => Err(RequireError::FilePathConversion(url.clone())), - } -} - #[op2(fast)] pub fn op_require_can_parse_as_esm( scope: &mut v8::HandleScope, diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index e33cf91574..d2e5758826 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -4,14 +4,12 @@ use deno_core::op2; use deno_core::url::Url; use deno_core::OpState; use deno_fs::FileSystemRc; -use node_resolver::NodeResolution; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use crate::NodePermissions; -use crate::NodeRequireResolverRc; -use crate::NodeResolverRc; +use crate::NodeRequireLoaderRc; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a, P>( @@ -21,9 +19,9 @@ fn ensure_read_permission<'a, P>( where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let loader = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - resolver.ensure_read_permission(permissions, file_path) + loader.ensure_read_permission(permissions, file_path) } #[derive(Debug, thiserror::Error)] @@ -42,14 +40,11 @@ pub enum WorkerThreadsFilenameError { UrlToPath, #[error("File not found [{0:?}]")] FileNotFound(PathBuf), - #[error("Neither ESM nor CJS")] - NeitherEsmNorCjs, - #[error("{0}")] - UrlToNodeResolution(node_resolver::errors::UrlToNodeResolutionError), #[error(transparent)] Fs(#[from] deno_io::fs::FsError), } +// todo(dsherret): we should remove this and do all this work inside op_create_worker #[op2] #[string] pub fn op_worker_threads_filename
<P>
( @@ -88,30 +83,5 @@ where url_path.to_path_buf(), )); } - let node_resolver = state.borrow::(); - match node_resolver - .url_to_node_resolution(url) - .map_err(WorkerThreadsFilenameError::UrlToNodeResolution)? - { - NodeResolution::Esm(u) => Ok(u.to_string()), - NodeResolution::CommonJs(u) => wrap_cjs(u), - NodeResolution::BuiltIn(_) => { - Err(WorkerThreadsFilenameError::NeitherEsmNorCjs) - } - } -} - -/// -/// Wrap a CJS file-URL and the required setup in a stringified `data:`-URL -/// -fn wrap_cjs(url: Url) -> Result { - let path = url - .to_file_path() - .map_err(|_| WorkerThreadsFilenameError::UrlToPath)?; - let filename = path.file_name().unwrap().to_string_lossy(); - Ok(format!( - "data:text/javascript,import {{ createRequire }} from \"node:module\";\ - const require = createRequire(\"{}\"); require(\"./{}\");", - url, filename, - )) + Ok(url.to_string()) } diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 7935903a8d..296d819aa5 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -1071,13 +1071,35 @@ Module._extensions[".js"] = function (module, filename) { } else if (pkg?.type === "commonjs") { format = "commonjs"; } - } else if (StringPrototypeEndsWith(filename, ".cjs")) { - format = "commonjs"; } module._compile(content, filename, format); }; +Module._extensions[".ts"] = + Module._extensions[".jsx"] = + Module._extensions[".tsx"] = + function (module, filename) { + const content = op_require_read_file(filename); + + let format; + const pkg = op_require_read_closest_package_json(filename); + if (pkg?.type === "module") { + format = "module"; + } else if (pkg?.type === "commonjs") { + format = "commonjs"; + } + + module._compile(content, filename, format); + }; + +Module._extensions[".cjs"] = + Module._extensions[".cts"] = + function (module, filename) { + const content = op_require_read_file(filename); + module._compile(content, filename, "commonjs"); + }; + function loadESMFromCJS(module, filename, code) { const namespace = op_import_sync( url.pathToFileURL(filename).toString(), @@ -1087,7 +1109,10 @@ function loadESMFromCJS(module, filename, code) { module.exports = namespace; } -Module._extensions[".mjs"] = function (module, filename) { +Module._extensions[".mjs"] = Module._extensions[".mts"] = function ( + module, + filename, +) { loadESMFromCJS(module, filename); }; diff --git a/resolvers/deno/fs.rs b/resolvers/deno/fs.rs index b08be37982..44495fa7c2 100644 --- a/resolvers/deno/fs.rs +++ b/resolvers/deno/fs.rs @@ -15,13 +15,3 @@ pub trait DenoResolverFs { fn is_dir_sync(&self, path: &Path) -> bool; fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result>; } - -pub(crate) struct DenoPkgJsonFsAdapter<'a, Fs: DenoResolverFs>(pub &'a Fs); - -impl<'a, Fs: DenoResolverFs> deno_package_json::fs::DenoPkgJsonFs - for DenoPkgJsonFsAdapter<'a, Fs> -{ - fn read_to_string_lossy(&self, path: &Path) -> std::io::Result { - self.0.read_to_string_lossy(path) - } -} diff --git a/resolvers/deno/npm/byonm.rs b/resolvers/deno/npm/byonm.rs index 3394b3e501..b85117052c 100644 --- a/resolvers/deno/npm/byonm.rs +++ b/resolvers/deno/npm/byonm.rs @@ -10,16 +10,17 @@ use deno_package_json::PackageJsonDepValue; use deno_path_util::url_to_file_path; use deno_semver::package::PackageReq; use deno_semver::Version; +use node_resolver::env::NodeResolverEnv; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageJsonLoadError; use 
node_resolver::errors::PackageNotFoundError; -use node_resolver::load_pkg_json; +use node_resolver::InNpmPackageChecker; use node_resolver::NpmResolver; +use node_resolver::PackageJsonResolverRc; use thiserror::Error; use url::Url; -use crate::fs::DenoPkgJsonFsAdapter; use crate::fs::DenoResolverFs; use super::local::normalize_pkg_name_for_node_modules_deno_folder; @@ -36,32 +37,41 @@ pub enum ByonmResolvePkgFolderFromDenoReqError { Io(#[from] std::io::Error), } -pub struct ByonmNpmResolverCreateOptions { - pub fs: Fs, +pub struct ByonmNpmResolverCreateOptions< + Fs: DenoResolverFs, + TEnv: NodeResolverEnv, +> { // todo(dsherret): investigate removing this pub root_node_modules_dir: Option, + pub fs: Fs, + pub pkg_json_resolver: PackageJsonResolverRc, } #[derive(Debug)] -pub struct ByonmNpmResolver { +pub struct ByonmNpmResolver { fs: Fs, + pkg_json_resolver: PackageJsonResolverRc, root_node_modules_dir: Option, } -impl Clone for ByonmNpmResolver { +impl Clone + for ByonmNpmResolver +{ fn clone(&self) -> Self { Self { fs: self.fs.clone(), + pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.root_node_modules_dir.clone(), } } } -impl ByonmNpmResolver { - pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { +impl ByonmNpmResolver { + pub fn new(options: ByonmNpmResolverCreateOptions) -> Self { Self { - fs: options.fs, root_node_modules_dir: options.root_node_modules_dir, + fs: options.fs, + pkg_json_resolver: options.pkg_json_resolver, } } @@ -73,7 +83,7 @@ impl ByonmNpmResolver { &self, path: &Path, ) -> Result>, PackageJsonLoadError> { - load_pkg_json(&DenoPkgJsonFsAdapter(&self.fs), path) + self.pkg_json_resolver.load_package_json(path) } /// Finds the ancestor package.json that contains the specified dependency. 
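Since `ByonmNpmResolver` is now generic over a `NodeResolverEnv` and loads package.json files through a shared `PackageJsonResolver`, construction changes shape accordingly. A hedged sketch of the wiring, in which `MyFs`, `MyEnv`, and the cwd-based `node_modules` path are placeholder assumptions and `Arc` stands in for `MaybeArc` under the sync feature:

use std::path::PathBuf;
use std::sync::Arc;

use node_resolver::PackageJsonResolver;

// `MyFs: DenoResolverFs + Clone` and `MyEnv: NodeResolverEnv` are assumed to be
// defined elsewhere; they are placeholders for this sketch.
fn make_byonm_resolver(
  fs: MyFs,
  env: MyEnv,
  cwd: PathBuf,
) -> ByonmNpmResolver<MyFs, MyEnv> {
  // A single PackageJsonResolver can now be shared with the node resolver and
  // the require ops instead of each component reading package.json through its
  // own fs adapter.
  let pkg_json_resolver = Arc::new(PackageJsonResolver::new(env));
  ByonmNpmResolver::new(ByonmNpmResolverCreateOptions {
    root_node_modules_dir: Some(cwd.join("node_modules")),
    fs,
    pkg_json_resolver,
  })
}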
@@ -290,8 +300,10 @@ impl ByonmNpmResolver { } } -impl NpmResolver - for ByonmNpmResolver +impl< + Fs: DenoResolverFs + Send + Sync + std::fmt::Debug, + TEnv: NodeResolverEnv, + > NpmResolver for ByonmNpmResolver { fn resolve_package_folder_from_package( &self, @@ -342,7 +354,12 @@ impl NpmResolver .into() }) } +} +#[derive(Debug)] +pub struct ByonmInNpmPackageChecker; + +impl InNpmPackageChecker for ByonmInNpmPackageChecker { fn in_npm_package(&self, specifier: &Url) -> bool { specifier.scheme() == "file" && specifier diff --git a/resolvers/deno/npm/mod.rs b/resolvers/deno/npm/mod.rs index 9d885cad31..45e2341c78 100644 --- a/resolvers/deno/npm/mod.rs +++ b/resolvers/deno/npm/mod.rs @@ -3,6 +3,7 @@ mod byonm; mod local; +pub use byonm::ByonmInNpmPackageChecker; pub use byonm::ByonmNpmResolver; pub use byonm::ByonmNpmResolverCreateOptions; pub use byonm::ByonmResolvePkgFolderFromDenoReqError; diff --git a/resolvers/deno/sloppy_imports.rs b/resolvers/deno/sloppy_imports.rs index e215e87686..7aba5b771a 100644 --- a/resolvers/deno/sloppy_imports.rs +++ b/resolvers/deno/sloppy_imports.rs @@ -232,7 +232,7 @@ impl SloppyImportsResolver { | MediaType::Tsx | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap => { return None; } diff --git a/resolvers/node/analyze.rs b/resolvers/node/analyze.rs index 009296006a..c7415933d7 100644 --- a/resolvers/node/analyze.rs +++ b/resolvers/node/analyze.rs @@ -19,18 +19,19 @@ use anyhow::Error as AnyError; use url::Url; use crate::env::NodeResolverEnv; -use crate::package_json::load_pkg_json; +use crate::npm::InNpmPackageCheckerRc; use crate::resolution::NodeResolverRc; use crate::NodeModuleKind; use crate::NodeResolutionMode; use crate::NpmResolverRc; +use crate::PackageJsonResolverRc; use crate::PathClean; #[derive(Debug, Clone)] -pub enum CjsAnalysis { +pub enum CjsAnalysis<'a> { /// File was found to be an ES module and the translator should /// load the code as ESM. - Esm(String), + Esm(Cow<'a, str>), Cjs(CjsAnalysisExports), } @@ -50,11 +51,11 @@ pub trait CjsCodeAnalyzer { /// already has it. If the source is needed by the implementation, /// then it can use the provided source, or otherwise load it if /// necessary. - async fn analyze_cjs( + async fn analyze_cjs<'a>( &self, specifier: &Url, - maybe_source: Option, - ) -> Result; + maybe_source: Option>, + ) -> Result, AnyError>; } pub struct NodeCodeTranslator< @@ -63,8 +64,10 @@ pub struct NodeCodeTranslator< > { cjs_code_analyzer: TCjsCodeAnalyzer, env: TNodeResolverEnv, + in_npm_pkg_checker: InNpmPackageCheckerRc, node_resolver: NodeResolverRc, npm_resolver: NpmResolverRc, + pkg_json_resolver: PackageJsonResolverRc, } impl @@ -73,14 +76,18 @@ impl pub fn new( cjs_code_analyzer: TCjsCodeAnalyzer, env: TNodeResolverEnv, + in_npm_pkg_checker: InNpmPackageCheckerRc, node_resolver: NodeResolverRc, npm_resolver: NpmResolverRc, + pkg_json_resolver: PackageJsonResolverRc, ) -> Self { Self { cjs_code_analyzer, env, + in_npm_pkg_checker, node_resolver, npm_resolver, + pkg_json_resolver, } } @@ -90,11 +97,11 @@ impl /// For all discovered reexports the analysis will be performed recursively. /// /// If successful a source code for equivalent ES module is returned. 
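// For a hypothetical entry file at /app/main.cjs, the translated module
// produced after the change below has roughly this shape (the path is
// illustrative, and any named re-exports found by the CJS analysis are
// appended before the default export):
//
//   import {createRequire as __internalCreateRequire, Module as __internalModule } from "node:module";
//   const require = __internalCreateRequire(import.meta.url);
//   let mod;
//   if (import.meta.main) {
//     mod = __internalModule._load("/app/main.cjs", null, true)
//   } else {
//     mod = require("/app/main.cjs");
//   }
//   export default mod;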
- pub async fn translate_cjs_to_esm( + pub async fn translate_cjs_to_esm<'a>( &self, entry_specifier: &Url, - source: Option, - ) -> Result { + source: Option>, + ) -> Result, AnyError> { let mut temp_var_count = 0; let analysis = self @@ -108,7 +115,7 @@ impl }; let mut source = vec![ - r#"import {createRequire as __internalCreateRequire} from "node:module"; + r#"import {createRequire as __internalCreateRequire, Module as __internalModule } from "node:module"; const require = __internalCreateRequire(import.meta.url);"# .to_string(), ]; @@ -135,7 +142,12 @@ impl } source.push(format!( - "const mod = require(\"{}\");", + r#"let mod; + if (import.meta.main) {{ + mod = __internalModule._load("{0}", null, true) + }} else {{ + mod = require("{0}"); + }}"#, url_to_file_path(entry_specifier) .unwrap() .to_str() @@ -159,7 +171,7 @@ impl source.push("export default mod;".to_string()); let translated_source = source.join("\n"); - Ok(translated_source) + Ok(Cow::Owned(translated_source)) } async fn analyze_reexports<'a>( @@ -174,7 +186,7 @@ impl struct Analysis { reexport_specifier: url::Url, referrer: url::Url, - analysis: CjsAnalysis, + analysis: CjsAnalysis<'static>, } type AnalysisFuture<'a> = LocalBoxFuture<'a, Result>; @@ -329,8 +341,9 @@ impl }?; let package_json_path = module_dir.join("package.json"); - let maybe_package_json = - load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?; + let maybe_package_json = self + .pkg_json_resolver + .load_package_json(&package_json_path)?; if let Some(package_json) = maybe_package_json { if let Some(exports) = &package_json.exports { return Some( @@ -356,8 +369,9 @@ impl if self.env.is_dir_sync(&d) { // subdir might have a package.json that specifies the entrypoint let package_json_path = d.join("package.json"); - let maybe_package_json = - load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?; + let maybe_package_json = self + .pkg_json_resolver + .load_package_json(&package_json_path)?; if let Some(package_json) = maybe_package_json { if let Some(main) = package_json.main(NodeModuleKind::Cjs) { return Ok(Some(url_from_file_path(&d.join(main).clean())?)); @@ -382,7 +396,7 @@ impl // as a fallback, attempt to resolve it via the ancestor directories let mut last = referrer_path.as_path(); while let Some(parent) = last.parent() { - if !self.npm_resolver.in_npm_package_at_dir_path(parent) { + if !self.in_npm_pkg_checker.in_npm_package_at_dir_path(parent) { break; } let path = if parent.ends_with("node_modules") { diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs index 4ba829eda5..aacbecefb4 100644 --- a/resolvers/node/errors.rs +++ b/resolvers/node/errors.rs @@ -81,29 +81,6 @@ pub trait NodeJsErrorCoded { fn code(&self) -> NodeJsErrorCode; } -kinded_err!( - ResolvePkgSubpathFromDenoModuleError, - ResolvePkgSubpathFromDenoModuleErrorKind -); - -impl NodeJsErrorCoded for ResolvePkgSubpathFromDenoModuleError { - fn code(&self) -> NodeJsErrorCode { - use ResolvePkgSubpathFromDenoModuleErrorKind::*; - match self.as_kind() { - PackageSubpathResolve(e) => e.code(), - UrlToNodeResolution(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum ResolvePkgSubpathFromDenoModuleErrorKind { - #[error(transparent)] - PackageSubpathResolve(#[from] PackageSubpathResolveError), - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), -} - // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError #[derive(Debug, Clone, Error)] #[error( @@ -394,37 +371,6 @@ impl NodeJsErrorCoded for 
CanonicalizingPkgJsonDirError { } } -#[derive(Debug, Error)] -#[error("TypeScript files are not supported in npm packages: {specifier}")] -pub struct TypeScriptNotSupportedInNpmError { - pub specifier: Url, -} - -impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError { - fn code(&self) -> NodeJsErrorCode { - NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION - } -} - -kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind); - -impl NodeJsErrorCoded for UrlToNodeResolutionError { - fn code(&self) -> NodeJsErrorCode { - match self.as_kind() { - UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(), - UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(), - } - } -} - -#[derive(Debug, Error)] -pub enum UrlToNodeResolutionErrorKind { - #[error(transparent)] - TypeScriptNotSupported(#[from] TypeScriptNotSupportedInNpmError), - #[error(transparent)] - ClosestPkgJson(#[from] ClosestPkgJsonError), -} - // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError #[derive(Debug, Error)] #[error( @@ -533,8 +479,6 @@ pub enum NodeResolveErrorKind { TypesNotFound(#[from] TypesNotFoundError), #[error(transparent)] FinalizeResolution(#[from] FinalizeResolutionError), - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), } kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind); @@ -728,8 +672,6 @@ pub enum ResolvePkgJsonBinExportError { MissingPkgJson { pkg_json_path: PathBuf }, #[error("Failed resolving binary export. {message}")] InvalidBinProperty { message: String }, - #[error(transparent)] - UrlToNodeResolution(#[from] UrlToNodeResolutionError), } #[derive(Debug, Error)] diff --git a/resolvers/node/lib.rs b/resolvers/node/lib.rs index f03f770486..18b0a85363 100644 --- a/resolvers/node/lib.rs +++ b/resolvers/node/lib.rs @@ -13,9 +13,12 @@ mod resolution; mod sync; pub use deno_package_json::PackageJson; +pub use npm::InNpmPackageChecker; +pub use npm::InNpmPackageCheckerRc; pub use npm::NpmResolver; pub use npm::NpmResolverRc; -pub use package_json::load_pkg_json; +pub use package_json::PackageJsonResolver; +pub use package_json::PackageJsonResolverRc; pub use package_json::PackageJsonThreadLocalCache; pub use path::PathClean; pub use resolution::parse_npm_pkg_name; diff --git a/resolvers/node/npm.rs b/resolvers/node/npm.rs index 6b5f21db62..2132f0b545 100644 --- a/resolvers/node/npm.rs +++ b/resolvers/node/npm.rs @@ -22,7 +22,13 @@ pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync { specifier: &str, referrer: &Url, ) -> Result; +} +#[allow(clippy::disallowed_types)] +pub type InNpmPackageCheckerRc = crate::sync::MaybeArc; + +/// Checks if a provided specifier is in an npm package. 
+pub trait InNpmPackageChecker: std::fmt::Debug + MaybeSend + MaybeSync { fn in_npm_package(&self, specifier: &Url) -> bool; fn in_npm_package_at_dir_path(&self, path: &Path) -> bool { diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs index de750f1d7e..6967779e5d 100644 --- a/resolvers/node/package_json.rs +++ b/resolvers/node/package_json.rs @@ -2,15 +2,21 @@ use deno_package_json::PackageJson; use deno_package_json::PackageJsonRc; +use deno_path_util::strip_unc_prefix; use std::cell::RefCell; use std::collections::HashMap; use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; +use url::Url; +use crate::env::NodeResolverEnv; +use crate::errors::CanonicalizingPkgJsonDirError; +use crate::errors::ClosestPkgJsonError; use crate::errors::PackageJsonLoadError; -// use a thread local cache so that workers have their own distinct cache +// todo(dsherret): this isn't exactly correct and we should change it to instead +// be created per worker and passed down as a ctor arg to the pkg json resolver thread_local! { static CACHE: RefCell> = RefCell::new(HashMap::new()); } @@ -33,21 +39,91 @@ impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache { } } -/// Helper to load a package.json file using the thread local cache -/// in node_resolver. -pub fn load_pkg_json( - fs: &dyn deno_package_json::fs::DenoPkgJsonFs, - path: &Path, -) -> Result, PackageJsonLoadError> { - let result = - PackageJson::load_from_path(path, fs, Some(&PackageJsonThreadLocalCache)); - match result { - Ok(pkg_json) => Ok(Some(pkg_json)), - Err(deno_package_json::PackageJsonLoadError::Io { source, .. }) - if source.kind() == ErrorKind::NotFound => - { +#[allow(clippy::disallowed_types)] +pub type PackageJsonResolverRc = + crate::sync::MaybeArc>; + +#[derive(Debug)] +pub struct PackageJsonResolver { + env: TEnv, +} + +impl PackageJsonResolver { + pub fn new(env: TEnv) -> Self { + Self { env } + } + + pub fn get_closest_package_json( + &self, + url: &Url, + ) -> Result, ClosestPkgJsonError> { + let Ok(file_path) = deno_path_util::url_to_file_path(url) else { + return Ok(None); + }; + self.get_closest_package_json_from_path(&file_path) + } + + pub fn get_closest_package_json_from_path( + &self, + file_path: &Path, + ) -> Result, ClosestPkgJsonError> { + // we use this for deno compile using byonm because the script paths + // won't be in virtual file system, but the package.json paths will be + fn canonicalize_first_ancestor_exists( + dir_path: &Path, + env: &TEnv, + ) -> Result, std::io::Error> { + for ancestor in dir_path.ancestors() { + match env.realpath_sync(ancestor) { + Ok(dir_path) => return Ok(Some(dir_path)), + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + // keep searching + } + Err(err) => return Err(err), + } + } Ok(None) } - Err(err) => Err(PackageJsonLoadError(err)), + + let parent_dir = file_path.parent().unwrap(); + let Some(start_dir) = canonicalize_first_ancestor_exists( + parent_dir, &self.env, + ) + .map_err(|source| CanonicalizingPkgJsonDirError { + dir_path: parent_dir.to_path_buf(), + source, + })? + else { + return Ok(None); + }; + let start_dir = strip_unc_prefix(start_dir); + for current_dir in start_dir.ancestors() { + let package_json_path = current_dir.join("package.json"); + if let Some(pkg_json) = self.load_package_json(&package_json_path)? 
{ + return Ok(Some(pkg_json)); + } + } + + Ok(None) + } + + pub fn load_package_json( + &self, + path: &Path, + ) -> Result, PackageJsonLoadError> { + let result = PackageJson::load_from_path( + path, + self.env.pkg_json_fs(), + Some(&PackageJsonThreadLocalCache), + ); + match result { + Ok(pkg_json) => Ok(Some(pkg_json)), + Err(deno_package_json::PackageJsonLoadError::Io { source, .. }) + if source.kind() == ErrorKind::NotFound => + { + Ok(None) + } + Err(err) => Err(PackageJsonLoadError(err)), + } } } diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs index 811583a5ee..d44539e978 100644 --- a/resolvers/node/resolution.rs +++ b/resolvers/node/resolution.rs @@ -6,9 +6,6 @@ use std::path::PathBuf; use anyhow::bail; use anyhow::Error as AnyError; -use deno_media_type::MediaType; -use deno_package_json::PackageJsonRc; -use deno_path_util::strip_unc_prefix; use deno_path_util::url_from_file_path; use serde_json::Map; use serde_json::Value; @@ -16,8 +13,6 @@ use url::Url; use crate::env::NodeResolverEnv; use crate::errors; -use crate::errors::CanonicalizingPkgJsonDirError; -use crate::errors::ClosestPkgJsonError; use crate::errors::DataUrlReferrerError; use crate::errors::FinalizeResolutionError; use crate::errors::InvalidModuleSpecifierError; @@ -32,7 +27,6 @@ use crate::errors::PackageExportsResolveError; use crate::errors::PackageImportNotDefinedError; use crate::errors::PackageImportsResolveError; use crate::errors::PackageImportsResolveErrorKind; -use crate::errors::PackageJsonLoadError; use crate::errors::PackagePathNotExportedError; use crate::errors::PackageResolveError; use crate::errors::PackageSubpathResolveError; @@ -42,14 +36,13 @@ use crate::errors::PackageTargetResolveError; use crate::errors::PackageTargetResolveErrorKind; use crate::errors::ResolveBinaryCommandsError; use crate::errors::ResolvePkgJsonBinExportError; -use crate::errors::ResolvePkgSubpathFromDenoModuleError; -use crate::errors::TypeScriptNotSupportedInNpmError; use crate::errors::TypesNotFoundError; use crate::errors::TypesNotFoundErrorData; use crate::errors::UnsupportedDirImportError; use crate::errors::UnsupportedEsmUrlSchemeError; -use crate::errors::UrlToNodeResolutionError; +use crate::npm::InNpmPackageCheckerRc; use crate::NpmResolverRc; +use crate::PackageJsonResolverRc; use crate::PathClean; use deno_package_json::PackageJson; @@ -73,16 +66,14 @@ impl NodeResolutionMode { #[derive(Debug)] pub enum NodeResolution { - Esm(Url), - CommonJs(Url), + Module(Url), BuiltIn(String), } impl NodeResolution { pub fn into_url(self) -> Url { match self { - Self::Esm(u) => u, - Self::CommonJs(u) => u, + Self::Module(u) => u, Self::BuiltIn(specifier) => { if specifier.starts_with("node:") { Url::parse(&specifier).unwrap() @@ -92,42 +83,6 @@ impl NodeResolution { } } } - - pub fn into_specifier_and_media_type( - resolution: Option, - ) -> (Url, MediaType) { - match resolution { - Some(NodeResolution::CommonJs(specifier)) => { - let media_type = MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Cts, - MediaType::Dts => MediaType::Dcts, - _ => media_type, - }, - ) - } - Some(NodeResolution::Esm(specifier)) => { - let media_type = MediaType::from_specifier(&specifier); - ( - specifier, - match media_type { - MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs, - MediaType::TypeScript | MediaType::Tsx => MediaType::Mts, - MediaType::Dts => MediaType::Dmts, - 
_ => media_type, - }, - ) - } - Some(resolution) => (resolution.into_url(), MediaType::Dts), - None => ( - Url::parse("internal:///missing_dependency.d.ts").unwrap(), - MediaType::Dts, - ), - } - } } #[allow(clippy::disallowed_types)] @@ -136,16 +91,28 @@ pub type NodeResolverRc = crate::sync::MaybeArc>; #[derive(Debug)] pub struct NodeResolver { env: TEnv, + in_npm_pkg_checker: InNpmPackageCheckerRc, npm_resolver: NpmResolverRc, + pkg_json_resolver: PackageJsonResolverRc, } impl NodeResolver { - pub fn new(env: TEnv, npm_resolver: NpmResolverRc) -> Self { - Self { env, npm_resolver } + pub fn new( + env: TEnv, + in_npm_pkg_checker: InNpmPackageCheckerRc, + npm_resolver: NpmResolverRc, + pkg_json_resolver: PackageJsonResolverRc, + ) -> Self { + Self { + env, + in_npm_pkg_checker, + npm_resolver, + pkg_json_resolver, + } } pub fn in_npm_package(&self, specifier: &Url) -> bool { - self.npm_resolver.in_npm_package(specifier) + self.in_npm_pkg_checker.in_npm_package(specifier) } /// This function is an implementation of `defaultResolve` in @@ -166,7 +133,7 @@ impl NodeResolver { if let Ok(url) = Url::parse(specifier) { if url.scheme() == "data" { - return Ok(NodeResolution::Esm(url)); + return Ok(NodeResolution::Module(url)); } if let Some(module_name) = @@ -191,7 +158,7 @@ impl NodeResolver { let url = referrer .join(specifier) .map_err(|source| DataUrlReferrerError { source })?; - return Ok(NodeResolution::Esm(url)); + return Ok(NodeResolution::Module(url)); } } @@ -212,7 +179,7 @@ impl NodeResolver { }; let url = self.finalize_resolution(url, Some(referrer))?; - let resolve_response = self.url_to_node_resolution(url)?; + let resolve_response = NodeResolution::Module(url); // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. Ok(resolve_response) @@ -236,6 +203,7 @@ impl NodeResolver { })?) } else if specifier.starts_with('#') { let pkg_config = self + .pkg_json_resolver .get_closest_package_json(referrer) .map_err(PackageImportsResolveErrorKind::ClosestPkgJson) .map_err(|err| PackageImportsResolveError(Box::new(err)))?; @@ -332,7 +300,7 @@ impl NodeResolver { package_subpath: Option<&str>, maybe_referrer: Option<&Url>, mode: NodeResolutionMode, - ) -> Result { + ) -> Result { let node_module_kind = NodeModuleKind::Esm; let package_subpath = package_subpath .map(|s| format!("./{s}")) @@ -345,10 +313,9 @@ impl NodeResolver { DEFAULT_CONDITIONS, mode, )?; - let resolve_response = self.url_to_node_resolution(resolved_url)?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. - Ok(resolve_response) + Ok(resolved_url) } pub fn resolve_binary_commands( @@ -356,7 +323,9 @@ impl NodeResolver { package_folder: &Path, ) -> Result, ResolveBinaryCommandsError> { let pkg_json_path = package_folder.join("package.json"); - let Some(package_json) = self.load_package_json(&pkg_json_path)? else { + let Some(package_json) = + self.pkg_json_resolver.load_package_json(&pkg_json_path)? + else { return Ok(Vec::new()); }; @@ -381,9 +350,11 @@ impl NodeResolver { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result { + ) -> Result { let pkg_json_path = package_folder.join("package.json"); - let Some(package_json) = self.load_package_json(&pkg_json_path)? else { + let Some(package_json) = + self.pkg_json_resolver.load_package_json(&pkg_json_path)? 
+ else { return Err(ResolvePkgJsonBinExportError::MissingPkgJson { pkg_json_path, }); @@ -396,37 +367,9 @@ impl NodeResolver { })?; let url = url_from_file_path(&package_folder.join(bin_entry)).unwrap(); - let resolve_response = self.url_to_node_resolution(url)?; // TODO(bartlomieju): skipped checking errors for commonJS resolution and // "preserveSymlinksMain"/"preserveSymlinks" options. - Ok(resolve_response) - } - - pub fn url_to_node_resolution( - &self, - url: Url, - ) -> Result { - let url_str = url.as_str().to_lowercase(); - if url_str.starts_with("http") || url_str.ends_with(".json") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") { - let maybe_package_config = self.get_closest_package_json(&url)?; - match maybe_package_config { - Some(c) if c.typ == "module" => Ok(NodeResolution::Esm(url)), - Some(_) => Ok(NodeResolution::CommonJs(url)), - None => Ok(NodeResolution::Esm(url)), - } - } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") { - Ok(NodeResolution::Esm(url)) - } else if url_str.ends_with(".ts") || url_str.ends_with(".mts") { - if self.in_npm_package(&url) { - Err(TypeScriptNotSupportedInNpmError { specifier: url }.into()) - } else { - Ok(NodeResolution::Esm(url)) - } - } else { - Ok(NodeResolution::CommonJs(url)) - } + Ok(url) } /// Checks if the resolved file has a corresponding declaration file. @@ -1101,7 +1044,9 @@ impl NodeResolver { let (package_name, package_subpath, _is_scoped) = parse_npm_pkg_name(specifier, referrer)?; - if let Some(package_config) = self.get_closest_package_json(referrer)? { + if let Some(package_config) = + self.pkg_json_resolver.get_closest_package_json(referrer)? + { // ResolveSelf if package_config.name.as_ref() == Some(&package_name) { if let Some(exports) = &package_config.exports { @@ -1216,7 +1161,10 @@ impl NodeResolver { mode: NodeResolutionMode, ) -> Result { let package_json_path = package_dir_path.join("package.json"); - match self.load_package_json(&package_json_path)? { + match self + .pkg_json_resolver + .load_package_json(&package_json_path)? + { Some(pkg_json) => self.resolve_package_subpath( &pkg_json, package_subpath, @@ -1337,70 +1285,6 @@ impl NodeResolver { } } - pub fn get_closest_package_json( - &self, - url: &Url, - ) -> Result, ClosestPkgJsonError> { - let Ok(file_path) = deno_path_util::url_to_file_path(url) else { - return Ok(None); - }; - self.get_closest_package_json_from_path(&file_path) - } - - pub fn get_closest_package_json_from_path( - &self, - file_path: &Path, - ) -> Result, ClosestPkgJsonError> { - // we use this for deno compile using byonm because the script paths - // won't be in virtual file system, but the package.json paths will be - fn canonicalize_first_ancestor_exists( - dir_path: &Path, - env: &dyn NodeResolverEnv, - ) -> Result, std::io::Error> { - for ancestor in dir_path.ancestors() { - match env.realpath_sync(ancestor) { - Ok(dir_path) => return Ok(Some(dir_path)), - Err(err) if err.kind() == std::io::ErrorKind::NotFound => { - // keep searching - } - Err(err) => return Err(err), - } - } - Ok(None) - } - - let parent_dir = file_path.parent().unwrap(); - let Some(start_dir) = canonicalize_first_ancestor_exists( - parent_dir, &self.env, - ) - .map_err(|source| CanonicalizingPkgJsonDirError { - dir_path: parent_dir.to_path_buf(), - source, - })? 
- else { - return Ok(None); - }; - let start_dir = strip_unc_prefix(start_dir); - for current_dir in start_dir.ancestors() { - let package_json_path = current_dir.join("package.json"); - if let Some(pkg_json) = self.load_package_json(&package_json_path)? { - return Ok(Some(pkg_json)); - } - } - - Ok(None) - } - - pub fn load_package_json( - &self, - package_json_path: &Path, - ) -> Result, PackageJsonLoadError> { - crate::package_json::load_pkg_json( - self.env.pkg_json_fs(), - package_json_path, - ) - } - pub(super) fn legacy_main_resolve( &self, package_json: &PackageJson, diff --git a/runtime/errors.rs b/runtime/errors.rs index a5c436e751..ada26ec35f 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -1048,8 +1048,6 @@ mod node { WorkerThreadsFilenameError::UrlToPathString => "Error", WorkerThreadsFilenameError::UrlToPath => "Error", WorkerThreadsFilenameError::FileNotFound(_) => "Error", - WorkerThreadsFilenameError::NeitherEsmNorCjs => "Error", - WorkerThreadsFilenameError::UrlToNodeResolution(_) => "Error", WorkerThreadsFilenameError::Fs(e) => super::get_fs_error(e), } } @@ -1058,11 +1056,13 @@ mod node { match error { RequireError::UrlParse(e) => get_url_parse_error_class(e), RequireError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), - RequireError::PackageExportsResolve(_) => "Error", - RequireError::PackageJsonLoad(_) => "Error", - RequireError::ClosestPkgJson(_) => "Error", - RequireError::FilePathConversion(_) => "Error", - RequireError::PackageImportsResolve(_) => "Error", + RequireError::PackageExportsResolve(_) + | RequireError::PackageJsonLoad(_) + | RequireError::ClosestPkgJson(_) + | RequireError::FilePathConversion(_) + | RequireError::UrlConversion(_) + | RequireError::ReadModule(_) + | RequireError::PackageImportsResolve(_) => "Error", RequireError::Fs(e) | RequireError::UnableToGetCwd(e) => { super::get_fs_error(e) } diff --git a/runtime/shared.rs b/runtime/shared.rs index 02dfd18719..f7d76f67a7 100644 --- a/runtime/shared.rs +++ b/runtime/shared.rs @@ -98,6 +98,7 @@ pub fn maybe_transpile_source( imports_not_used_as_values: deno_ast::ImportsNotUsedAsValues::Remove, ..Default::default() }, + &deno_ast::TranspileModuleOptions::default(), &deno_ast::EmitOptions { source_map: if cfg!(debug_assertions) { SourceMapOption::Separate @@ -109,9 +110,9 @@ pub fn maybe_transpile_source( )? 
.into_source(); - let maybe_source_map: Option = - transpiled_source.source_map.map(|sm| sm.into()); - let source_text = String::from_utf8(transpiled_source.source)?; - + let maybe_source_map: Option = transpiled_source + .source_map + .map(|sm| sm.into_bytes().into()); + let source_text = transpiled_source.text; Ok((source_text.into(), maybe_source_map)) } diff --git a/tests/integration/run_tests.rs b/tests/integration/run_tests.rs index db9f79556e..a07dd56c83 100644 --- a/tests/integration/run_tests.rs +++ b/tests/integration/run_tests.rs @@ -3605,7 +3605,8 @@ fn running_declaration_files() { temp_dir.write(file, ""); context .new_command() - .args_vec(["run", file]) + // todo(dsherret): investigate why --allow-read is required here + .args_vec(["run", "--allow-read", file]) .run() .skip_output_check() .assert_exit_code(0); diff --git a/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js b/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js index cb0ff5c3b5..d3f80a0495 100644 --- a/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js +++ b/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js @@ -1 +1,5 @@ -export {}; +// this module is declared as CommonJS, but during loading we'll +// discover it's ESM and load it fine +export function add(a, b) { + return a + b; +} diff --git a/tests/specs/check/css_import/exists_run_with_check.out b/tests/specs/check/css_import/exists_run_with_check.out index 1a1dafeb74..315769e40c 100644 --- a/tests/specs/check/css_import/exists_run_with_check.out +++ b/tests/specs/check/css_import/exists_run_with_check.out @@ -1,3 +1,3 @@ -error: Expected a JavaScript or TypeScript module, but identified a Unknown module. Importing these types of modules is currently not supported. +error: Expected a JavaScript or TypeScript module, but identified a Css module. Importing these types of modules is currently not supported. 
Specifier: file:///[WILDLINE]/app.css at file:///[WILDLINE]/exists.ts:2:8 diff --git a/tests/specs/compile/cjs/__test__.jsonc b/tests/specs/compile/cjs/__test__.jsonc new file mode 100644 index 0000000000..9bdcf4724f --- /dev/null +++ b/tests/specs/compile/cjs/__test__.jsonc @@ -0,0 +1,24 @@ +{ + "tempDir": true, + "steps": [{ + "if": "unix", + "args": "compile --output main main.js", + "output": "[WILDCARD]" + }, { + "if": "unix", + "commandName": "./main", + "args": [], + "output": "output.out", + "exitCode": 0 + }, { + "if": "windows", + "args": "compile --output main.exe main.js", + "output": "[WILDCARD]" + }, { + "if": "windows", + "commandName": "./main.exe", + "args": [], + "output": "output.out", + "exitCode": 0 + }] +} diff --git a/tests/specs/compile/cjs/add.cjs b/tests/specs/compile/cjs/add.cjs new file mode 100644 index 0000000000..bf90601267 --- /dev/null +++ b/tests/specs/compile/cjs/add.cjs @@ -0,0 +1 @@ +module.exports = (a, b) => a + b; diff --git a/tests/specs/compile/cjs/divide.cts b/tests/specs/compile/cjs/divide.cts new file mode 100644 index 0000000000..d89a600a4e --- /dev/null +++ b/tests/specs/compile/cjs/divide.cts @@ -0,0 +1 @@ +module.exports.divide = (a: number, b: number) => a / b; diff --git a/tests/specs/compile/cjs/main.js b/tests/specs/compile/cjs/main.js new file mode 100644 index 0000000000..c2a8c7be02 --- /dev/null +++ b/tests/specs/compile/cjs/main.js @@ -0,0 +1,5 @@ +import { add } from "./reexport.cjs"; +import { multiply } from "./multiply.cts"; + +console.log(add(1, 2)); +console.log(multiply(2, 3)); diff --git a/tests/specs/compile/cjs/multiply.cts b/tests/specs/compile/cjs/multiply.cts new file mode 100644 index 0000000000..3c0618cfc5 --- /dev/null +++ b/tests/specs/compile/cjs/multiply.cts @@ -0,0 +1,4 @@ +/// +exports.multiply = function (a: number, b: number): number { + return require("./divide.cts").divide(a, 1 / b); +}; diff --git a/tests/specs/compile/cjs/output.out b/tests/specs/compile/cjs/output.out new file mode 100644 index 0000000000..2559e5c49e --- /dev/null +++ b/tests/specs/compile/cjs/output.out @@ -0,0 +1,2 @@ +3 +6 diff --git a/tests/specs/compile/cjs/reexport.cjs b/tests/specs/compile/cjs/reexport.cjs new file mode 100644 index 0000000000..af7cecfdf4 --- /dev/null +++ b/tests/specs/compile/cjs/reexport.cjs @@ -0,0 +1 @@ +module.exports.add = require("./add.cjs"); diff --git a/tests/specs/compile/detect_cjs/__test__.jsonc b/tests/specs/compile/detect_cjs/__test__.jsonc index 32bebb7a57..0abf121f05 100644 --- a/tests/specs/compile/detect_cjs/__test__.jsonc +++ b/tests/specs/compile/detect_cjs/__test__.jsonc @@ -1,24 +1,27 @@ { "tempDir": true, "steps": [{ + "args": "install", + "output": "[WILDCARD]" + }, { "if": "unix", - "args": "compile --allow-read --output main main.js", + "args": "compile --output main main.js", "output": "compile.out" }, { "if": "unix", "commandName": "./main", "args": [], "output": "output.out", - "exitCode": 1 + "exitCode": 0 }, { "if": "windows", - "args": "compile --allow-read --output main.exe main.js", + "args": "compile --output main.exe main.js", "output": "compile.out" }, { "if": "windows", "commandName": "./main.exe", "args": [], "output": "output.out", - "exitCode": 1 + "exitCode": 0 }] } diff --git a/tests/specs/compile/detect_cjs/add.js b/tests/specs/compile/detect_cjs/add.js index 2a886fbc18..94b0263f0e 100644 --- a/tests/specs/compile/detect_cjs/add.js +++ b/tests/specs/compile/detect_cjs/add.js @@ -1,3 +1,3 @@ module.exports.add = function (a, b) { - return a + b; + return 
require("./subtract.ts").subtract(a, -b); }; diff --git a/tests/specs/compile/detect_cjs/compile.out b/tests/specs/compile/detect_cjs/compile.out index 6509b7f29c..913e363c3e 100644 --- a/tests/specs/compile/detect_cjs/compile.out +++ b/tests/specs/compile/detect_cjs/compile.out @@ -1,3 +1,2 @@ -Warning --unstable-detect-cjs is not properly supported in deno compile. The compiled executable may encounter runtime errors. Check file:///[WILDLINE]/main.js Compile file:///[WILDLINE] diff --git a/tests/specs/compile/detect_cjs/output.out b/tests/specs/compile/detect_cjs/output.out index e1c27b8dcf..00750edc07 100644 --- a/tests/specs/compile/detect_cjs/output.out +++ b/tests/specs/compile/detect_cjs/output.out @@ -1,2 +1 @@ -error: Uncaught SyntaxError: The requested module './add.js' does not provide an export named 'add' - at (file:///[WILDLINE]) +3 diff --git a/tests/specs/compile/detect_cjs/package.json b/tests/specs/compile/detect_cjs/package.json index 5bbefffbab..6e65b32ed5 100644 --- a/tests/specs/compile/detect_cjs/package.json +++ b/tests/specs/compile/detect_cjs/package.json @@ -1,3 +1,6 @@ { - "type": "commonjs" + "type": "commonjs", + "dependencies": { + "@types/node": "*" + } } diff --git a/tests/specs/compile/detect_cjs/subtract.ts b/tests/specs/compile/detect_cjs/subtract.ts new file mode 100644 index 0000000000..e4f6760b77 --- /dev/null +++ b/tests/specs/compile/detect_cjs/subtract.ts @@ -0,0 +1,2 @@ +/// +module.exports.subtract = (a: number, b: number) => a - b; diff --git a/tests/specs/npm/require_type_commonjs/__test__.jsonc b/tests/specs/npm/require_type_commonjs/__test__.jsonc index c9ba97ff52..a71173ce37 100644 --- a/tests/specs/npm/require_type_commonjs/__test__.jsonc +++ b/tests/specs/npm/require_type_commonjs/__test__.jsonc @@ -1,5 +1,4 @@ { "args": "run --allow-read --quiet main.ts", - "output": "main.out", - "exitCode": 1 + "output": "main.out" } diff --git a/tests/specs/npm/require_type_commonjs/main.out b/tests/specs/npm/require_type_commonjs/main.out index d715db8a94..00750edc07 100644 --- a/tests/specs/npm/require_type_commonjs/main.out +++ b/tests/specs/npm/require_type_commonjs/main.out @@ -1,4 +1 @@ -error: 'import', and 'export' cannot be used outside of module code at file://[WILDCARD]/@denotest/type-commonjs/1.0.0/index.js:1:1 - - export {}; - ~~~~~~ +3 diff --git a/tests/specs/npm/require_type_commonjs/main.ts b/tests/specs/npm/require_type_commonjs/main.ts index 243eb216e4..95ec6099ec 100644 --- a/tests/specs/npm/require_type_commonjs/main.ts +++ b/tests/specs/npm/require_type_commonjs/main.ts @@ -1 +1,2 @@ -import "npm:@denotest/type-commonjs"; +import { add } from "npm:@denotest/type-commonjs"; +console.log(add(1, 2)); diff --git a/tests/specs/npm/typescript_file_in_package/__test__.jsonc b/tests/specs/npm/typescript_file_in_package/__test__.jsonc index 08979ed257..7b5c5e1b67 100644 --- a/tests/specs/npm/typescript_file_in_package/__test__.jsonc +++ b/tests/specs/npm/typescript_file_in_package/__test__.jsonc @@ -1,5 +1,5 @@ { - "args": "run typescript_file_in_package/main.ts", - "output": "typescript_file_in_package/main.out", + "args": "run main.ts", + "output": "main.out", "exitCode": 1 } diff --git a/tests/specs/npm/typescript_file_in_package/main.out b/tests/specs/npm/typescript_file_in_package/main.out new file mode 100644 index 0000000000..58290a8737 --- /dev/null +++ b/tests/specs/npm/typescript_file_in_package/main.out @@ -0,0 +1,3 @@ +Download http://localhost:4260/@denotest%2ftypescript-file +Download 
http://localhost:4260/@denotest/typescript-file/1.0.0.tgz +error: TypeScript files are not supported in npm packages: file:///[WILDCARD]/@denotest/typescript-file/1.0.0/index.ts diff --git a/tests/specs/npm/typescript_file_in_package/typescript_file_in_package/main.ts b/tests/specs/npm/typescript_file_in_package/main.ts similarity index 100% rename from tests/specs/npm/typescript_file_in_package/typescript_file_in_package/main.ts rename to tests/specs/npm/typescript_file_in_package/main.ts diff --git a/tests/specs/npm/typescript_file_in_package/typescript_file_in_package/main.out b/tests/specs/npm/typescript_file_in_package/typescript_file_in_package/main.out deleted file mode 100644 index b3faa87900..0000000000 --- a/tests/specs/npm/typescript_file_in_package/typescript_file_in_package/main.out +++ /dev/null @@ -1,6 +0,0 @@ -Download http://localhost:4260/@denotest%2ftypescript-file -Download http://localhost:4260/@denotest/typescript-file/1.0.0.tgz -error: Could not resolve 'npm:@denotest/typescript-file@1.0.0'. - -Caused by: - TypeScript files are not supported in npm packages: file:///[WILDCARD]/@denotest/typescript-file/1.0.0/index.ts diff --git a/tests/specs/run/cjs/main_module/__test__.jsonc b/tests/specs/run/cjs/main_module/__test__.jsonc new file mode 100644 index 0000000000..e756a63627 --- /dev/null +++ b/tests/specs/run/cjs/main_module/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run --allow-read main.cjs", + "output": "main.out" +} diff --git a/tests/specs/run/cjs/main_module/main.cjs b/tests/specs/run/cjs/main_module/main.cjs new file mode 100644 index 0000000000..2a4c57ab1c --- /dev/null +++ b/tests/specs/run/cjs/main_module/main.cjs @@ -0,0 +1 @@ +console.log(require.main); diff --git a/tests/specs/run/cjs/main_module/main.out b/tests/specs/run/cjs/main_module/main.out new file mode 100644 index 0000000000..93b86d27e3 --- /dev/null +++ b/tests/specs/run/cjs/main_module/main.out @@ -0,0 +1,5 @@ +Module { + id: ".", + [WILDCARD] + filename: "[WILDCARD]main.cjs", +[WILDCARD] \ No newline at end of file diff --git a/tests/specs/run/cjs_reexport_non_analyzable/__test__.jsonc b/tests/specs/run/cjs/reexport_non_analyzable/__test__.jsonc similarity index 100% rename from tests/specs/run/cjs_reexport_non_analyzable/__test__.jsonc rename to tests/specs/run/cjs/reexport_non_analyzable/__test__.jsonc diff --git a/tests/specs/run/cjs_reexport_non_analyzable/deno.json b/tests/specs/run/cjs/reexport_non_analyzable/deno.json similarity index 100% rename from tests/specs/run/cjs_reexport_non_analyzable/deno.json rename to tests/specs/run/cjs/reexport_non_analyzable/deno.json diff --git a/tests/specs/run/cjs_reexport_non_analyzable/main.ts b/tests/specs/run/cjs/reexport_non_analyzable/main.ts similarity index 100% rename from tests/specs/run/cjs_reexport_non_analyzable/main.ts rename to tests/specs/run/cjs/reexport_non_analyzable/main.ts diff --git a/tests/specs/run/cjs_reexport_non_analyzable/node_modules/foo.cjs b/tests/specs/run/cjs/reexport_non_analyzable/node_modules/foo.cjs similarity index 100% rename from tests/specs/run/cjs_reexport_non_analyzable/node_modules/foo.cjs rename to tests/specs/run/cjs/reexport_non_analyzable/node_modules/foo.cjs diff --git a/tests/specs/run/cjs/unprepared/__test__.jsonc b/tests/specs/run/cjs/unprepared/__test__.jsonc new file mode 100644 index 0000000000..f816bad869 --- /dev/null +++ b/tests/specs/run/cjs/unprepared/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run -A main.ts", + "output": "main.out" +} diff --git a/tests/specs/run/cjs/unprepared/file.cjs 
b/tests/specs/run/cjs/unprepared/file.cjs new file mode 100644 index 0000000000..ba0c6fa6ff --- /dev/null +++ b/tests/specs/run/cjs/unprepared/file.cjs @@ -0,0 +1,7 @@ +// non-analyzable +const moduleName = "./output.cjs"; +function getModuleName() { + return moduleName; +} + +require(getModuleName()); diff --git a/tests/specs/run/cjs/unprepared/main.out b/tests/specs/run/cjs/unprepared/main.out new file mode 100644 index 0000000000..e965047ad7 --- /dev/null +++ b/tests/specs/run/cjs/unprepared/main.out @@ -0,0 +1 @@ +Hello diff --git a/tests/specs/run/cjs/unprepared/main.ts b/tests/specs/run/cjs/unprepared/main.ts new file mode 100644 index 0000000000..5630b4fbc3 --- /dev/null +++ b/tests/specs/run/cjs/unprepared/main.ts @@ -0,0 +1,7 @@ +// non-analyzable +const moduleName = "./output.cjs"; +function getModuleName() { + return moduleName; +} + +await import(getModuleName()); diff --git a/tests/specs/run/cjs/unprepared/output.cjs b/tests/specs/run/cjs/unprepared/output.cjs new file mode 100644 index 0000000000..54ed3702f5 --- /dev/null +++ b/tests/specs/run/cjs/unprepared/output.cjs @@ -0,0 +1,3 @@ +console.log("Hello"); + +module.exports = 1; diff --git a/tests/specs/run/cts/cjs_import_cts/__test__.jsonc b/tests/specs/run/cts/cjs_import_cts/__test__.jsonc new file mode 100644 index 0000000000..2205183d07 --- /dev/null +++ b/tests/specs/run/cts/cjs_import_cts/__test__.jsonc @@ -0,0 +1,13 @@ +{ + "tests": { + "no_check": { + "args": "run --allow-read main.js", + "output": "main.out" + }, + "check": { + "args": "check main.js", + "output": "check.out", + "exitCode": 1 + } + } +} diff --git a/tests/specs/run/package_json_type/commonjs/add.js b/tests/specs/run/cts/cjs_import_cts/add.cts similarity index 100% rename from tests/specs/run/package_json_type/commonjs/add.js rename to tests/specs/run/cts/cjs_import_cts/add.cts diff --git a/tests/specs/run/cts/cjs_import_cts/check.out b/tests/specs/run/cts/cjs_import_cts/check.out new file mode 100644 index 0000000000..a27e8d7af7 --- /dev/null +++ b/tests/specs/run/cts/cjs_import_cts/check.out @@ -0,0 +1,17 @@ +Check file:///[WILDLINE]main.js +error: TS2580 [ERROR]: Cannot find name 'module'. +module.exports.add = function (a, b) { +~~~~~~ + at file:///[WILDLINE] + +TS7006 [ERROR]: Parameter 'a' implicitly has an 'any' type. +module.exports.add = function (a, b) { + ^ + at file:///[WILDLINE] + +TS7006 [ERROR]: Parameter 'b' implicitly has an 'any' type. +module.exports.add = function (a, b) { + ^ + at file:///[WILDLINE] + +Found 3 errors. 
diff --git a/tests/specs/run/cts/cjs_import_cts/main.js b/tests/specs/run/cts/cjs_import_cts/main.js new file mode 100644 index 0000000000..9546a0fea5 --- /dev/null +++ b/tests/specs/run/cts/cjs_import_cts/main.js @@ -0,0 +1,3 @@ +import { subtract } from "./subtract.cjs"; + +console.log(subtract(1, 2)); diff --git a/tests/specs/run/cts/cjs_import_cts/main.out b/tests/specs/run/cts/cjs_import_cts/main.out new file mode 100644 index 0000000000..3a2e3f4984 --- /dev/null +++ b/tests/specs/run/cts/cjs_import_cts/main.out @@ -0,0 +1 @@ +-1 diff --git a/tests/specs/run/cts/cjs_import_cts/subtract.cjs b/tests/specs/run/cts/cjs_import_cts/subtract.cjs new file mode 100644 index 0000000000..7dee54346f --- /dev/null +++ b/tests/specs/run/cts/cjs_import_cts/subtract.cjs @@ -0,0 +1,3 @@ +module.exports.subtract = function (a, b) { + return require("./add.cts").add(a, -b); +}; diff --git a/tests/specs/run/cts/import_export_equals/__test__.jsonc b/tests/specs/run/cts/import_export_equals/__test__.jsonc new file mode 100644 index 0000000000..6a5c6db420 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/__test__.jsonc @@ -0,0 +1,17 @@ +{ + "tests": { + "main": { + "args": "run --check --allow-read=. main.cts", + "output": "main.out" + }, + "mts": { + "args": "run --allow-read=. mod.mts", + "output": "mod.mts.out" + }, + "mts_check": { + "args": "check mod.mts", + "output": "mod.mts.check.out", + "exitCode": 1 + } + } +} diff --git a/tests/specs/run/cts/import_export_equals/add.cts b/tests/specs/run/cts/import_export_equals/add.cts new file mode 100644 index 0000000000..adf3503ac5 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/add.cts @@ -0,0 +1,3 @@ +export = function (a: number, b: number) { + return a + b; +}; diff --git a/tests/specs/run/cts/import_export_equals/main.cts b/tests/specs/run/cts/import_export_equals/main.cts new file mode 100644 index 0000000000..e5c45f92e0 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/main.cts @@ -0,0 +1,3 @@ +import add = require("./add.cts"); + +console.log(add(1, 2)); diff --git a/tests/specs/run/cts/import_export_equals/main.out b/tests/specs/run/cts/import_export_equals/main.out new file mode 100644 index 0000000000..e7a973a7d0 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/main.out @@ -0,0 +1,2 @@ +Check file:///[WILDLINE]/main.cts +3 diff --git a/tests/specs/run/cts/import_export_equals/mod.mts b/tests/specs/run/cts/import_export_equals/mod.mts new file mode 100644 index 0000000000..5fbbd6c6a0 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/mod.mts @@ -0,0 +1,3 @@ +import add from "./add.cts"; + +console.log(add(1, "test")); diff --git a/tests/specs/run/cts/import_export_equals/mod.mts.check.out b/tests/specs/run/cts/import_export_equals/mod.mts.check.out new file mode 100644 index 0000000000..8703539019 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/mod.mts.check.out @@ -0,0 +1,5 @@ +Check file:///[WILDLINE]/mod.mts +error: TS2345 [ERROR]: Argument of type 'string' is not assignable to parameter of type 'number'. 
+console.log(add(1, "test")); + ~~~~~~ + at file:///[WILDLINE]/mod.mts:3:20 diff --git a/tests/specs/run/cts/import_export_equals/mod.mts.out b/tests/specs/run/cts/import_export_equals/mod.mts.out new file mode 100644 index 0000000000..208465a080 --- /dev/null +++ b/tests/specs/run/cts/import_export_equals/mod.mts.out @@ -0,0 +1 @@ +1test diff --git a/tests/specs/run/cts/main/__test__.jsonc b/tests/specs/run/cts/main/__test__.jsonc new file mode 100644 index 0000000000..0157b44e96 --- /dev/null +++ b/tests/specs/run/cts/main/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run --allow-read main.cts", + "output": "main.out" +} diff --git a/tests/specs/run/cts/main/import_main.cjs b/tests/specs/run/cts/main/import_main.cjs new file mode 100644 index 0000000000..f61e0da2a6 --- /dev/null +++ b/tests/specs/run/cts/main/import_main.cjs @@ -0,0 +1 @@ +require("./main.cts").sayHello(); diff --git a/tests/specs/run/cts/main/main.cts b/tests/specs/run/cts/main/main.cts new file mode 100644 index 0000000000..0b0330686b --- /dev/null +++ b/tests/specs/run/cts/main/main.cts @@ -0,0 +1,5 @@ +module.exports.sayHello = function () { + console.log("Hello"); +}; + +require("./import_main.cjs"); diff --git a/tests/specs/run/cts/main/main.out b/tests/specs/run/cts/main/main.out new file mode 100644 index 0000000000..e965047ad7 --- /dev/null +++ b/tests/specs/run/cts/main/main.out @@ -0,0 +1 @@ +Hello diff --git a/tests/specs/run/package_json_type/commonjs/__test__.jsonc b/tests/specs/run/package_json_type/commonjs/basic/__test__.jsonc similarity index 100% rename from tests/specs/run/package_json_type/commonjs/__test__.jsonc rename to tests/specs/run/package_json_type/commonjs/basic/__test__.jsonc diff --git a/tests/specs/run/package_json_type/commonjs/basic/add.js b/tests/specs/run/package_json_type/commonjs/basic/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/basic/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/run/package_json_type/commonjs/deno.jsonc b/tests/specs/run/package_json_type/commonjs/basic/deno.jsonc similarity index 100% rename from tests/specs/run/package_json_type/commonjs/deno.jsonc rename to tests/specs/run/package_json_type/commonjs/basic/deno.jsonc diff --git a/tests/specs/run/package_json_type/commonjs/import_import_meta.js b/tests/specs/run/package_json_type/commonjs/basic/import_import_meta.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/import_import_meta.js rename to tests/specs/run/package_json_type/commonjs/basic/import_import_meta.js diff --git a/tests/specs/run/package_json_type/commonjs/import_meta.js b/tests/specs/run/package_json_type/commonjs/basic/import_meta.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/import_meta.js rename to tests/specs/run/package_json_type/commonjs/basic/import_meta.js diff --git a/tests/specs/run/package_json_type/commonjs/main_cjs.js b/tests/specs/run/package_json_type/commonjs/basic/main_cjs.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_cjs.js rename to tests/specs/run/package_json_type/commonjs/basic/main_cjs.js diff --git a/tests/specs/run/package_json_type/commonjs/main_esm.js b/tests/specs/run/package_json_type/commonjs/basic/main_esm.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_esm.js rename to tests/specs/run/package_json_type/commonjs/basic/main_esm.js diff --git 
a/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js b/tests/specs/run/package_json_type/commonjs/basic/main_esm_import_meta.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_esm_import_meta.js rename to tests/specs/run/package_json_type/commonjs/basic/main_esm_import_meta.js diff --git a/tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out b/tests/specs/run/package_json_type/commonjs/basic/main_esm_import_meta.out similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_esm_import_meta.out rename to tests/specs/run/package_json_type/commonjs/basic/main_esm_import_meta.out diff --git a/tests/specs/run/package_json_type/commonjs/main_mix.js b/tests/specs/run/package_json_type/commonjs/basic/main_mix.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_mix.js rename to tests/specs/run/package_json_type/commonjs/basic/main_mix.js diff --git a/tests/specs/run/package_json_type/commonjs/main_mix.out b/tests/specs/run/package_json_type/commonjs/basic/main_mix.out similarity index 100% rename from tests/specs/run/package_json_type/commonjs/main_mix.out rename to tests/specs/run/package_json_type/commonjs/basic/main_mix.out diff --git a/tests/specs/run/package_json_type/commonjs/not_import_meta.js b/tests/specs/run/package_json_type/commonjs/basic/not_import_meta.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/not_import_meta.js rename to tests/specs/run/package_json_type/commonjs/basic/not_import_meta.js diff --git a/tests/specs/run/package_json_type/commonjs/package.json b/tests/specs/run/package_json_type/commonjs/basic/package.json similarity index 100% rename from tests/specs/run/package_json_type/commonjs/package.json rename to tests/specs/run/package_json_type/commonjs/basic/package.json diff --git a/tests/specs/run/package_json_type/commonjs/tla.js b/tests/specs/run/package_json_type/commonjs/basic/tla.js similarity index 100% rename from tests/specs/run/package_json_type/commonjs/tla.js rename to tests/specs/run/package_json_type/commonjs/basic/tla.js diff --git a/tests/specs/run/package_json_type/commonjs/jsx/__test__.jsonc b/tests/specs/run/package_json_type/commonjs/jsx/__test__.jsonc new file mode 100644 index 0000000000..f815fd72cb --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/__test__.jsonc @@ -0,0 +1,5 @@ +{ + "tempDir": true, + "args": "run -A --quiet main.jsx", + "output": "main.out" +} diff --git a/tests/specs/run/package_json_type/commonjs/jsx/add.js b/tests/specs/run/package_json_type/commonjs/jsx/add.js new file mode 100644 index 0000000000..2a886fbc18 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/add.js @@ -0,0 +1,3 @@ +module.exports.add = function (a, b) { + return a + b; +}; diff --git a/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc b/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc new file mode 100644 index 0000000000..192ddb98c4 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/deno.jsonc @@ -0,0 +1,10 @@ +{ + "nodeModulesDir": "auto", + "compilerOptions": { + "jsx": "react-jsx", + "jsxImportSource": "react" + }, + "unstable": [ + "detect-cjs" + ] +} diff --git a/tests/specs/run/package_json_type/commonjs/jsx/main.jsx b/tests/specs/run/package_json_type/commonjs/jsx/main.jsx new file mode 100644 index 0000000000..1922fce1bb --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/main.jsx @@ -0,0 +1,7 @@ +const { add } = 
require("./add.js"); + +console.log(add(1, 2)); + +console.log(

!= null); + +require("./tsx.tsx"); diff --git a/tests/specs/run/package_json_type/commonjs/jsx/main.out b/tests/specs/run/package_json_type/commonjs/jsx/main.out new file mode 100644 index 0000000000..cdcf045418 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/main.out @@ -0,0 +1,4 @@ +3 +true +4 +true diff --git a/tests/specs/run/package_json_type/commonjs/jsx/package.json b/tests/specs/run/package_json_type/commonjs/jsx/package.json new file mode 100644 index 0000000000..88afcdcd95 --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/package.json @@ -0,0 +1,7 @@ +{ + "type": "commonjs", + "dependencies": { + "@types/react": "*", + "react": "*" + } +} diff --git a/tests/specs/run/package_json_type/commonjs/jsx/tsx.tsx b/tests/specs/run/package_json_type/commonjs/jsx/tsx.tsx new file mode 100644 index 0000000000..ad8f0c0a9c --- /dev/null +++ b/tests/specs/run/package_json_type/commonjs/jsx/tsx.tsx @@ -0,0 +1,5 @@ +import mod = require("./add.js"); + +console.log(mod.add(2, 2)); + +console.log(
!= null); diff --git a/tests/specs/run/remote_cjs_main/output.out b/tests/specs/run/remote_cjs_main/output.out index f75c33907a..360934acb7 100644 --- a/tests/specs/run/remote_cjs_main/output.out +++ b/tests/specs/run/remote_cjs_main/output.out @@ -1,3 +1,3 @@ Download http://localhost:4545/run/add.cjs -error: Expected a JavaScript or TypeScript module, but identified a Cjs module. Importing these types of modules is currently not supported. +error: Remote CJS modules are not supported. Specifier: http://localhost:4545/run/add.cjs diff --git a/tests/specs/run/require_esm/main.out b/tests/specs/run/require_esm/main.out index d17b1ead55..57b842b345 100644 --- a/tests/specs/run/require_esm/main.out +++ b/tests/specs/run/require_esm/main.out @@ -1,6 +1,6 @@ [Module: null prototype] { sync_js: 1 } [Module: null prototype] { sync_mjs: 1 } -error: Uncaught Error: Top-level await is not allowed in synchronous evaluation +error: Uncaught (in promise) Error: Top-level await is not allowed in synchronous evaluation at loadESMFromCJS (node:module:[WILDCARD]) at Module._compile (node:module:[WILDCARD]) at Object.Module._extensions..js (node:module:[WILDCARD]) diff --git a/tests/testdata/npm/deno_run_cowsay_no_permissions.out b/tests/testdata/npm/deno_run_cowsay_no_permissions.out index 6434620e2e..25b79d9a7d 100644 --- a/tests/testdata/npm/deno_run_cowsay_no_permissions.out +++ b/tests/testdata/npm/deno_run_cowsay_no_permissions.out @@ -1,2 +1,2 @@ -error: Uncaught NotCapable: Requires read access to , specify the required permissions during compilation using `deno compile --allow-read` +error: Uncaught (in promise) NotCapable: Requires read access to , specify the required permissions during compilation using `deno compile --allow-read` [WILDCARD]