// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::collections::BTreeSet;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;

use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use futures::future::LocalBoxFuture;
use futures::stream::FuturesUnordered;
use futures::FutureExt;
use futures::StreamExt;
use once_cell::sync::Lazy;

use anyhow::Context;
use anyhow::Error as AnyError;
use url::Url;

use crate::env::NodeResolverEnv;
use crate::package_json::load_pkg_json;
use crate::resolution::NodeResolverRc;
use crate::NodeModuleKind;
use crate::NodeResolutionMode;
use crate::NpmResolverRc;
use crate::PathClean;

#[derive(Debug, Clone)]
pub enum CjsAnalysis {
  /// File was found to be an ES module and the translator should
  /// load the code as ESM.
  Esm(String),
  Cjs(CjsAnalysisExports),
}

#[derive(Debug, Clone)]
pub struct CjsAnalysisExports {
  pub exports: Vec<String>,
  pub reexports: Vec<String>,
}

/// Code analyzer for CJS and ESM files.
#[async_trait::async_trait(?Send)]
pub trait CjsCodeAnalyzer {
  /// Analyzes CommonJS code for exports and reexports, which are
  /// then used to determine the wrapper ESM module exports.
  ///
  /// Note that the source is provided by the caller when the caller
  /// already has it. If the source is needed by the implementation,
  /// then it can use the provided source, or otherwise load it if
  /// necessary.
  async fn analyze_cjs(
    &self,
    specifier: &Url,
    maybe_source: Option<String>,
  ) -> Result<CjsAnalysis, AnyError>;
}

pub struct NodeCodeTranslator<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TNodeResolverEnv: NodeResolverEnv,
> {
  cjs_code_analyzer: TCjsCodeAnalyzer,
  env: TNodeResolverEnv,
  node_resolver: NodeResolverRc<TNodeResolverEnv>,
  npm_resolver: NpmResolverRc,
}

impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
  NodeCodeTranslator<TCjsCodeAnalyzer, TNodeResolverEnv>
{
  pub fn new(
    cjs_code_analyzer: TCjsCodeAnalyzer,
    env: TNodeResolverEnv,
    node_resolver: NodeResolverRc<TNodeResolverEnv>,
    npm_resolver: NpmResolverRc,
  ) -> Self {
    Self {
      cjs_code_analyzer,
      env,
      node_resolver,
      npm_resolver,
    }
  }

  /// Translates the given CJS module into ESM. This function performs static
  /// analysis on the file to find defined exports and reexports.
  ///
  /// For all discovered reexports the analysis is performed recursively.
  ///
  /// If successful, the source code of an equivalent ES module is returned.
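  ///
  /// As a rough sketch (the path passed to `require()` is the resolved file
  /// path of the entry CJS module), a CJS file exporting `foo` is translated
  /// into something like:
  ///
  /// ```text
  /// import {createRequire as __internalCreateRequire} from "node:module";
  /// const require = __internalCreateRequire(import.meta.url);
  /// const mod = require("/path/to/module.cjs");
  /// export const foo = mod["foo"];
  /// export default mod;
  /// ```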
  pub async fn translate_cjs_to_esm(
    &self,
    entry_specifier: &Url,
    source: Option<String>,
  ) -> Result<String, AnyError> {
    let mut temp_var_count = 0;

    let analysis = self
      .cjs_code_analyzer
      .analyze_cjs(entry_specifier, source)
      .await?;

    let analysis = match analysis {
      CjsAnalysis::Esm(source) => return Ok(source),
      CjsAnalysis::Cjs(analysis) => analysis,
    };

    let mut source = vec![
      r#"import {createRequire as __internalCreateRequire} from "node:module";
const require = __internalCreateRequire(import.meta.url);"#
        .to_string(),
    ];

    // use a BTreeSet to make the output deterministic for v8's code cache
    let mut all_exports = analysis.exports.into_iter().collect::<BTreeSet<_>>();

    if !analysis.reexports.is_empty() {
      let mut errors = Vec::new();
      self
        .analyze_reexports(
          entry_specifier,
          analysis.reexports,
          &mut all_exports,
          &mut errors,
        )
        .await;

      // surface errors afterwards in a deterministic way
      if !errors.is_empty() {
        errors.sort_by_cached_key(|e| e.to_string());
        return Err(errors.remove(0));
      }
    }

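    // Generate the `require()` call for the original CJS file (escaping the
    // path so it can live inside a double-quoted string literal) and then
    // re-export every discovered named export off the required module object.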
    source.push(format!(
      "const mod = require(\"{}\");",
      url_to_file_path(entry_specifier)
        .unwrap()
        .to_str()
        .unwrap()
        .replace('\\', "\\\\")
        .replace('\'', "\\\'")
        .replace('\"', "\\\"")
    ));

    for export in &all_exports {
      if export.as_str() != "default" {
        add_export(
          &mut source,
          export,
          &format!("mod[\"{}\"]", escape_for_double_quote_string(export)),
          &mut temp_var_count,
        );
      }
    }

    source.push("export default mod;".to_string());

    let translated_source = source.join("\n");
    Ok(translated_source)
  }
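
  /// Recursively (and concurrently) analyzes every re-exported module and
  /// merges the named exports it finds into `all_exports`. Failures are
  /// collected into `errors` instead of being returned so the caller can
  /// surface them deterministically.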
  async fn analyze_reexports<'a>(
    &'a self,
    entry_specifier: &url::Url,
    reexports: Vec<String>,
    all_exports: &mut BTreeSet<String>,
    // this goes through the modules concurrently, so collect
    // the errors in order to be deterministic
    errors: &mut Vec<anyhow::Error>,
  ) {
    struct Analysis {
      reexport_specifier: url::Url,
      referrer: url::Url,
      analysis: CjsAnalysis,
    }

    type AnalysisFuture<'a> = LocalBoxFuture<'a, Result<Analysis, AnyError>>;

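    // Track which specifiers have already been analyzed so that re-export
    // cycles terminate instead of being analyzed forever.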
    let mut handled_reexports: HashSet<Url> = HashSet::default();
    handled_reexports.insert(entry_specifier.clone());
    let mut analyze_futures: FuturesUnordered<AnalysisFuture<'a>> =
      FuturesUnordered::new();
    let cjs_code_analyzer = &self.cjs_code_analyzer;
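    // This closure resolves one batch of re-export specifiers and queues an
    // analysis future for each newly seen one; it is invoked again below for
    // every re-export discovered while draining the queue.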
    let mut handle_reexports =
      |referrer: url::Url,
       reexports: Vec<String>,
       analyze_futures: &mut FuturesUnordered<AnalysisFuture<'a>>,
       errors: &mut Vec<anyhow::Error>| {
        // 1. Resolve the re-exports and start a future to analyze each one
        for reexport in reexports {
          let result = self.resolve(
            &reexport,
            &referrer,
            // FIXME(bartlomieju): check if these conditions are okay, probably
            // should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
            &["deno", "node", "require", "default"],
            NodeResolutionMode::Execution,
          );
          let reexport_specifier = match result {
            Ok(Some(specifier)) => specifier,
            Ok(None) => continue,
            Err(err) => {
              errors.push(err);
              continue;
            }
          };

          if !handled_reexports.insert(reexport_specifier.clone()) {
            continue;
          }

          let referrer = referrer.clone();
          let future = async move {
            let analysis = cjs_code_analyzer
              .analyze_cjs(&reexport_specifier, None)
              .await
              .with_context(|| {
                format!(
                  "Could not load '{}' ({}) referenced from {}",
                  reexport, reexport_specifier, referrer
                )
              })?;

            Ok(Analysis {
              reexport_specifier,
              referrer,
              analysis,
            })
          }
          .boxed_local();
          analyze_futures.push(future);
        }
      };

    handle_reexports(
      entry_specifier.clone(),
      reexports,
      &mut analyze_futures,
      errors,
    );

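    // Drain the analysis futures as they complete; a completed analysis may
    // queue additional futures for the re-exports it discovered.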
    while let Some(analysis_result) = analyze_futures.next().await {
      // 2. Look at the analysis result and resolve its exports and re-exports
      let Analysis {
        reexport_specifier,
        referrer,
        analysis,
      } = match analysis_result {
        Ok(analysis) => analysis,
        Err(err) => {
          errors.push(err);
          continue;
        }
      };
      match analysis {
        CjsAnalysis::Esm(_) => {
          // todo(dsherret): support this once requiring ES modules is supported
          errors.push(anyhow::anyhow!(
            "Cannot require ES module '{}' from '{}'",
            reexport_specifier,
            referrer,
          ));
        }
        CjsAnalysis::Cjs(analysis) => {
          if !analysis.reexports.is_empty() {
            handle_reexports(
              reexport_specifier.clone(),
              analysis.reexports,
              &mut analyze_futures,
              errors,
            );
          }

          all_exports.extend(
            analysis
              .exports
              .into_iter()
              .filter(|e| e.as_str() != "default"),
          );
        }
      }
    }
  }

  // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code?
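  // Resolution order for a re-export specifier: relative specifiers are
  // probed on disk next to the referrer, bare specifiers go through the npm
  // resolver and the package's "exports" map (falling back to the legacy
  // "main"/index.js lookup), and as a last resort the ancestor node_modules
  // directories are searched.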
  fn resolve(
    &self,
    specifier: &str,
    referrer: &Url,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Option<Url>, AnyError> {
    if specifier.starts_with('/') {
      todo!();
    }

    let referrer_path = url_to_file_path(referrer).unwrap();
    if specifier.starts_with("./") || specifier.starts_with("../") {
      if let Some(parent) = referrer_path.parent() {
        return self
          .file_extension_probe(parent.join(specifier), &referrer_path)
          .and_then(|p| url_from_file_path(&p).map_err(AnyError::from))
          .map(Some);
      } else {
        todo!();
      }
    }

    // We've got a bare specifier or maybe "bare_specifier/blah.js"
    let (package_specifier, package_subpath) =
      parse_specifier(specifier).unwrap();

    let module_dir = match self
      .npm_resolver
      .resolve_package_folder_from_package(package_specifier.as_str(), referrer)
    {
      Err(err)
        if matches!(
          err.as_kind(),
          crate::errors::PackageFolderResolveErrorKind::PackageNotFound(..)
        ) =>
      {
        return Ok(None);
      }
      other => other,
    }?;

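    // Load the package's package.json, if it has one. When it defines an
    // "exports" map, resolve the subpath through it; otherwise fall back to
    // the legacy "main"/index.js behavior below.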
    let package_json_path = module_dir.join("package.json");
    let maybe_package_json =
      load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
    if let Some(package_json) = maybe_package_json {
      if let Some(exports) = &package_json.exports {
        return Some(
          self
            .node_resolver
            .package_exports_resolve(
              &package_json_path,
              &package_subpath,
              exports,
              Some(referrer),
              NodeModuleKind::Esm,
              conditions,
              mode,
            )
            .map_err(AnyError::from),
        )
        .transpose();
      }

      // old school
      if package_subpath != "." {
        let d = module_dir.join(package_subpath);
        if self.env.is_dir_sync(&d) {
          // subdir might have a package.json that specifies the entrypoint
          let package_json_path = d.join("package.json");
          let maybe_package_json =
            load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
          if let Some(package_json) = maybe_package_json {
            if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
              return Ok(Some(url_from_file_path(&d.join(main).clean())?));
            }
          }

          return Ok(Some(url_from_file_path(&d.join("index.js").clean())?));
        }
        return self
          .file_extension_probe(d, &referrer_path)
          .and_then(|p| url_from_file_path(&p).map_err(AnyError::from))
          .map(Some);
      } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
        return Ok(Some(url_from_file_path(&module_dir.join(main).clean())?));
      } else {
        return Ok(Some(url_from_file_path(
          &module_dir.join("index.js").clean(),
        )?));
      }
    }

    // as a fallback, attempt to resolve it via the ancestor directories
    let mut last = referrer_path.as_path();
    while let Some(parent) = last.parent() {
      if !self.npm_resolver.in_npm_package_at_dir_path(parent) {
        break;
      }
      let path = if parent.ends_with("node_modules") {
        parent.join(specifier)
      } else {
        parent.join("node_modules").join(specifier)
      };
      if let Ok(path) = self.file_extension_probe(path, &referrer_path) {
        return Ok(Some(url_from_file_path(&path)?));
      }
      last = parent;
    }

    Err(not_found(specifier, &referrer_path))
  }

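  /// Mimics Node's CJS file lookup for a required path `p`: prefer `p.js`,
  /// then `p/index.js` when `p` is an existing directory, then `p` itself,
  /// and finally `p.json`, erroring with `ERR_MODULE_NOT_FOUND` when nothing
  /// matches.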
  fn file_extension_probe(
    &self,
    p: PathBuf,
    referrer: &Path,
  ) -> Result<PathBuf, AnyError> {
    let p = p.clean();
    if self.env.exists_sync(&p) {
      let file_name = p.file_name().unwrap();
      let p_js =
        p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
      if self.env.is_file_sync(&p_js) {
        return Ok(p_js);
      } else if self.env.is_dir_sync(&p) {
        return Ok(p.join("index.js"));
      } else {
        return Ok(p);
      }
    } else if let Some(file_name) = p.file_name() {
      {
        let p_js =
          p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
        if self.env.is_file_sync(&p_js) {
          return Ok(p_js);
        }
      }
      {
        let p_json =
          p.with_file_name(format!("{}.json", file_name.to_str().unwrap()));
        if self.env.is_file_sync(&p_json) {
          return Ok(p_json);
        }
      }
    }
    Err(not_found(&p.to_string_lossy(), referrer))
  }
}

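// Names that cannot be bound directly with `export const` in the generated
// wrapper: JavaScript reserved (or otherwise problematic) words, plus `mod`,
// which the wrapper itself uses for the required module object.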
static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
  HashSet::from([
    "abstract",
    "arguments",
    "async",
    "await",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "eval",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "get",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "let",
    "long",
    "mod",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "set",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    "yield",
  ])
});

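/// Appends an `export` statement for `name` to the generated source. Valid
/// identifiers become `export const name = initializer;`, while reserved
/// words and invalid identifiers are bound to a `__deno_export_N__` temporary
/// and re-exported as a string, e.g. `export { __deno_export_1__ as "static" };`.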
fn add_export(
  source: &mut Vec<String>,
  name: &str,
  initializer: &str,
  temp_var_count: &mut usize,
) {
  fn is_valid_var_decl(name: &str) -> bool {
    // it's ok to be super strict here
    if name.is_empty() {
      return false;
    }

    if let Some(first) = name.chars().next() {
      if !first.is_ascii_alphabetic() && first != '_' && first != '$' {
        return false;
      }
    }

    name
      .chars()
      .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
  }

  // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
  // but it might not be necessary here since our analysis is more detailed?
  if RESERVED_WORDS.contains(name) || !is_valid_var_decl(name) {
    *temp_var_count += 1;
    // we can't create an identifier with a reserved word or invalid identifier name,
    // so assign it to a temporary variable that won't have a conflict, then re-export
    // it as a string
    source.push(format!(
      "const __deno_export_{temp_var_count}__ = {initializer};"
    ));
    source.push(format!(
      "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};",
      escape_for_double_quote_string(name)
    ));
  } else {
    source.push(format!("export const {name} = {initializer};"));
  }
}

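/// Splits a bare specifier into `(package_name, package_subpath)`, e.g.
/// `"@some-package/core/actions"` becomes `("@some-package/core", "./actions")`
/// and `"express"` becomes `("express", ".")`. Returns `None` when the package
/// name is invalid.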
fn parse_specifier(specifier: &str) -> Option<(String, String)> {
  let mut separator_index = specifier.find('/');
  let mut valid_package_name = true;
  // let mut is_scoped = false;
  if specifier.is_empty() {
    valid_package_name = false;
  } else if specifier.starts_with('@') {
    // is_scoped = true;
    if let Some(index) = separator_index {
      separator_index = specifier[index + 1..].find('/').map(|i| i + index + 1);
    } else {
      valid_package_name = false;
    }
  }

  let package_name = if let Some(index) = separator_index {
    specifier[0..index].to_string()
  } else {
    specifier.to_string()
  };

  // Package name cannot have leading . and cannot have percent-encoding or separators.
  for ch in package_name.chars() {
    if ch == '%' || ch == '\\' {
      valid_package_name = false;
      break;
    }
  }

  if !valid_package_name {
    return None;
  }

  let package_subpath = if let Some(index) = separator_index {
    format!(".{}", specifier.chars().skip(index).collect::<String>())
  } else {
    ".".to_string()
  };

  Some((package_name, package_subpath))
}

fn not_found(path: &str, referrer: &Path) -> AnyError {
  let msg = format!(
    "[ERR_MODULE_NOT_FOUND] Cannot find module \"{}\" imported from \"{}\"",
    path,
    referrer.to_string_lossy()
  );
  std::io::Error::new(std::io::ErrorKind::NotFound, msg).into()
}

fn escape_for_double_quote_string(text: &str) -> Cow<str> {
  // this should be rare, so doing a scan first before allocating is ok
  if text.chars().any(|c| matches!(c, '"' | '\\')) {
    // don't bother making this more complex for perf because it's rare
    Cow::Owned(text.replace('\\', "\\\\").replace('"', "\\\""))
  } else {
    Cow::Borrowed(text)
  }
}

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_add_export() {
    let mut temp_var_count = 0;
    let mut source = vec![];

    let exports = vec!["static", "server", "app", "dashed-export", "3d"];
    for export in exports {
      add_export(&mut source, export, "init", &mut temp_var_count);
    }
    assert_eq!(
      source,
      vec![
        "const __deno_export_1__ = init;".to_string(),
        "export { __deno_export_1__ as \"static\" };".to_string(),
        "export const server = init;".to_string(),
        "export const app = init;".to_string(),
        "const __deno_export_2__ = init;".to_string(),
        "export { __deno_export_2__ as \"dashed-export\" };".to_string(),
        "const __deno_export_3__ = init;".to_string(),
        "export { __deno_export_3__ as \"3d\" };".to_string(),
      ]
    )
  }

  #[test]
  fn test_parse_specifier() {
    assert_eq!(
      parse_specifier("@some-package/core/actions"),
      Some(("@some-package/core".to_string(), "./actions".to_string()))
    );
  }
}