
chore: improve unanalyzable dynamic import diagnostic (#22051)

Luca Casonato 2024-01-24 14:49:33 +01:00 committed by GitHub
parent 930ce20870
commit 745333f073
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 196 additions and 63 deletions


@@ -50,6 +50,17 @@ itest!(javascript_missing_decl_file {
temp_cwd: true,
});
itest!(unanalyzable_dynamic_import {
args: "publish --token 'sadfasdf'",
output: "publish/unanalyzable_dynamic_import.out",
cwd: Some("publish/unanalyzable_dynamic_import"),
copy_temp_dir: Some("publish/unanalyzable_dynamic_import"),
envs: env_vars_for_registry(),
exit_code: 0,
http_server: true,
temp_cwd: true,
});
itest!(javascript_decl_file {
args: "publish --token 'sadfasdf'",
output: "publish/javascript_decl_file.out",


@@ -9,5 +9,4 @@ error[zap-missing-explicit-return-type]: missing explicit return type in the pub
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/zap-missing-explicit-return-type
error: Found 1 problem


@@ -7,7 +7,6 @@ warning[zap-unsupported-javascript-entrypoint]: used a JavaScript module without
info: fast check avoids type inference, so JavaScript entrypoints should be avoided
docs: https://jsr.io/go/zap-unsupported-javascript-entrypoint
Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0
Visit http://127.0.0.1:4250/@foo/bar@1.0.0 for details


@@ -0,0 +1,16 @@
Checking fast check type graph for errors...
Ensuring type checks...
Check file://[WILDCARD]/mod.ts
warning[unanalyzable-dynamic-import]: unable to analyze dynamic import
--> [WILDCARD]mod.ts:1:7
|
1 | await import("asd " + asd);
| ^^^^^^^^^^^^^^^^^^^^ the unanalyzable dynamic import
info: after publishing this package, imports from the local import map do not work
info: dynamic imports that can not be analyzed at publish time will not be rewritten automatically
info: make sure the dynamic import is resolvable at runtime without an import map
Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0
Visit http://127.0.0.1:4250/@foo/bar@1.0.0 for details


@@ -0,0 +1,7 @@
{
"name": "@foo/bar",
"version": "1.0.0",
"exports": {
".": "./mod.ts"
}
}


@@ -0,0 +1 @@
await import("asd " + asd);


@@ -21,6 +21,7 @@ use crate::diagnostics::DiagnosticSnippetSource;
use crate::diagnostics::DiagnosticSourcePos;
use crate::diagnostics::DiagnosticSourceRange;
use crate::diagnostics::SourceTextParsedSourceStore;
use crate::util::import_map::ImportMapUnfurlDiagnostic;
#[derive(Clone, Default)]
pub struct PublishDiagnosticsCollector {
@@ -36,7 +37,7 @@ impl PublishDiagnosticsCollector {
let diagnostics = self.diagnostics.lock().unwrap().take();
let sources = SourceTextParsedSourceStore(sources);
for diagnostic in diagnostics {
eprintln!("{}", diagnostic.display(&sources));
eprint!("{}", diagnostic.display(&sources));
if matches!(diagnostic.level(), DiagnosticLevel::Error) {
errors += 1;
}
@@ -58,34 +59,42 @@ impl PublishDiagnosticsCollector {
}
pub enum PublishDiagnostic {
FastCheck { diagnostic: FastCheckDiagnostic },
FastCheck(FastCheckDiagnostic),
ImportMapUnfurl(ImportMapUnfurlDiagnostic),
}
impl Diagnostic for PublishDiagnostic {
fn level(&self) -> DiagnosticLevel {
match self {
PublishDiagnostic::FastCheck {
diagnostic: FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint { .. },
} => DiagnosticLevel::Warning,
PublishDiagnostic::FastCheck { .. } => DiagnosticLevel::Error,
PublishDiagnostic::FastCheck(
FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint { .. },
) => DiagnosticLevel::Warning,
PublishDiagnostic::FastCheck(_) => DiagnosticLevel::Error,
PublishDiagnostic::ImportMapUnfurl(_) => DiagnosticLevel::Warning,
}
}
fn code(&self) -> impl Display + '_ {
match &self {
PublishDiagnostic::FastCheck { diagnostic, .. } => diagnostic.code(),
PublishDiagnostic::FastCheck(diagnostic) => diagnostic.code(),
PublishDiagnostic::ImportMapUnfurl(diagnostic) => diagnostic.code(),
}
}
fn message(&self) -> impl Display + '_ {
match &self {
PublishDiagnostic::FastCheck { diagnostic, .. } => diagnostic.to_string(), // todo
PublishDiagnostic::FastCheck(diagnostic) => {
Cow::Owned(diagnostic.to_string())
}
PublishDiagnostic::ImportMapUnfurl(diagnostic) => {
Cow::Borrowed(diagnostic.message())
}
}
}
fn location(&self) -> DiagnosticLocation {
match &self {
PublishDiagnostic::FastCheck { diagnostic } => match diagnostic.range() {
PublishDiagnostic::FastCheck(diagnostic) => match diagnostic.range() {
Some(range) => DiagnosticLocation::PositionInFile {
specifier: Cow::Borrowed(diagnostic.specifier()),
source_pos: DiagnosticSourcePos::SourcePos(range.range.start),
@@ -94,12 +103,21 @@ impl Diagnostic for PublishDiagnostic {
specifier: Cow::Borrowed(diagnostic.specifier()),
},
},
PublishDiagnostic::ImportMapUnfurl(diagnostic) => match diagnostic {
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport {
specifier,
range,
} => DiagnosticLocation::PositionInFile {
specifier: Cow::Borrowed(specifier),
source_pos: DiagnosticSourcePos::SourcePos(range.start),
},
},
}
}
fn snippet(&self) -> Option<DiagnosticSnippet<'_>> {
match &self {
PublishDiagnostic::FastCheck { diagnostic } => {
PublishDiagnostic::FastCheck(diagnostic) => {
diagnostic.range().map(|range| DiagnosticSnippet {
source: DiagnosticSnippetSource::Specifier(Cow::Borrowed(
diagnostic.specifier(),
@@ -114,14 +132,29 @@ impl Diagnostic for PublishDiagnostic {
},
})
}
PublishDiagnostic::ImportMapUnfurl(diagnostic) => match diagnostic {
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport {
specifier,
range,
} => Some(DiagnosticSnippet {
source: DiagnosticSnippetSource::Specifier(Cow::Borrowed(specifier)),
highlight: DiagnosticSnippetHighlight {
style: DiagnosticSnippetHighlightStyle::Warning,
range: DiagnosticSourceRange {
start: DiagnosticSourcePos::SourcePos(range.start),
end: DiagnosticSourcePos::SourcePos(range.end),
},
description: Some("the unanalyzable dynamic import".into()),
},
}),
},
}
}
fn hint(&self) -> Option<impl Display + '_> {
match &self {
PublishDiagnostic::FastCheck { diagnostic } => {
Some(diagnostic.fix_hint())
}
PublishDiagnostic::FastCheck(diagnostic) => Some(diagnostic.fix_hint()),
PublishDiagnostic::ImportMapUnfurl(_) => None,
}
}
@@ -131,7 +164,7 @@ impl Diagnostic for PublishDiagnostic {
fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
match &self {
PublishDiagnostic::FastCheck { diagnostic } => {
PublishDiagnostic::FastCheck(diagnostic) => {
let infos = diagnostic
.additional_info()
.iter()
@@ -139,14 +172,24 @@ impl Diagnostic for PublishDiagnostic {
.collect();
Cow::Owned(infos)
}
PublishDiagnostic::ImportMapUnfurl(diagnostic) => match diagnostic {
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport { .. } => Cow::Borrowed(&[
Cow::Borrowed("after publishing this package, imports from the local import map do not work"),
Cow::Borrowed("dynamic imports that can not be analyzed at publish time will not be rewritten automatically"),
Cow::Borrowed("make sure the dynamic import is resolvable at runtime without an import map")
]),
},
}
}
fn docs_url(&self) -> Option<impl Display + '_> {
match &self {
PublishDiagnostic::FastCheck { diagnostic } => {
PublishDiagnostic::FastCheck(diagnostic) => {
Some(format!("https://jsr.io/go/{}", diagnostic.code()))
}
PublishDiagnostic::ImportMapUnfurl(diagnostic) => match diagnostic {
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport { .. } => None,
},
}
}
}
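
The collector that these trait methods feed is only visible here in fragments. Below is a minimal, self-contained sketch of the same enum-dispatch-plus-collector pattern, assuming only the standard library; `Diag`, `Level` and `Collector` are illustrative stand-ins, not the real `PublishDiagnostic`/`PublishDiagnosticsCollector` API.

```rust
// Hypothetical miniature of the pattern above: one enum per diagnostic kind,
// every accessor dispatching with a match, and a shared collector that is
// pushed to during publishing and drained once at the end.
use std::fmt::Display;
use std::sync::Mutex;

#[derive(Debug)]
enum Level {
    Warning,
    Error,
}

#[derive(Debug)]
enum Diag {
    FastCheck(String),       // stand-in for FastCheckDiagnostic
    ImportMapUnfurl(String), // stand-in for ImportMapUnfurlDiagnostic
}

impl Diag {
    fn level(&self) -> Level {
        match self {
            Diag::FastCheck(_) => Level::Error,
            Diag::ImportMapUnfurl(_) => Level::Warning,
        }
    }

    fn message(&self) -> impl Display + '_ {
        match self {
            Diag::FastCheck(msg) | Diag::ImportMapUnfurl(msg) => msg,
        }
    }
}

// Interior mutability lets helpers push through a shared `&Collector`.
#[derive(Default)]
struct Collector {
    diagnostics: Mutex<Vec<Diag>>,
}

impl Collector {
    fn push(&self, diagnostic: Diag) {
        self.diagnostics.lock().unwrap().push(diagnostic);
    }

    fn print_and_count_errors(&self) -> usize {
        let diagnostics = std::mem::take(&mut *self.diagnostics.lock().unwrap());
        let mut errors = 0;
        for d in diagnostics {
            eprintln!("{:?}: {}", d.level(), d.message());
            if matches!(d.level(), Level::Error) {
                errors += 1;
            }
        }
        errors
    }
}

fn main() {
    let collector = Collector::default();
    collector.push(Diag::ImportMapUnfurl("unable to analyze dynamic import".into()));
    collector.push(Diag::FastCheck("missing explicit return type".into()));
    assert_eq!(collector.print_and_count_errors(), 1);
}
```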


@@ -94,9 +94,8 @@ pub fn collect_fast_check_type_graph_diagnostics(
{
continue;
}
diagnostics_collector.push(PublishDiagnostic::FastCheck {
diagnostic: diagnostic.clone(),
});
diagnostics_collector
.push(PublishDiagnostic::FastCheck(diagnostic.clone()));
if matches!(
diagnostic,
FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint { .. }


@@ -11,9 +11,9 @@ use deno_config::ConfigFile;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::unsync::JoinHandle;
use deno_core::unsync::JoinSet;
use deno_runtime::colors;
use deno_runtime::deno_fetch::reqwest;
@@ -89,6 +89,7 @@ async fn prepare_publish(
deno_json: &ConfigFile,
source_cache: Arc<ParsedSourceCache>,
import_map: Arc<ImportMap>,
diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Rc<PreparedPublishPackage>, AnyError> {
let config_path = deno_json.specifier.to_file_path().unwrap();
let dir_path = config_path.parent().unwrap().to_path_buf();
@@ -134,11 +135,13 @@
.to_files_config()
.map(|files| files.map(|f| f.exclude).unwrap_or_default())?;
let diagnostics_collector = diagnostics_collector.clone();
let tarball = deno_core::unsync::spawn_blocking(move || {
let unfurler = ImportMapUnfurler::new(&import_map);
tar::create_gzipped_tarball(
&dir_path,
&*source_cache,
&diagnostics_collector,
&unfurler,
&exclude_patterns,
)
@@ -666,8 +669,13 @@ async fn prepare_packages_for_publishing(
)
.await?;
let mut prepared_package_by_name = HashMap::with_capacity(1);
let package =
prepare_publish(&deno_json, source_cache.clone(), import_map).await?;
let package = prepare_publish(
&deno_json,
source_cache.clone(),
import_map,
diagnostics_collector,
)
.await?;
let package_name = format!("@{}/{}", package.scope, package.package);
let publish_order_graph =
PublishOrderGraph::new_single(package_name.clone());
@@ -692,29 +700,31 @@
let publish_order_graph =
publish_order::build_publish_order_graph(&graph, &roots)?;
let results =
workspace_config
.members
.iter()
.cloned()
.map(|member| {
let import_map = import_map.clone();
let source_cache = source_cache.clone();
deno_core::unsync::spawn(async move {
let package = prepare_publish(&member.config_file, source_cache, import_map)
.await
.with_context(|| {
format!("Failed preparing '{}'.", member.package_name)
})?;
Ok((member.package_name, package))
})
})
.collect::<Vec<
JoinHandle<Result<(String, Rc<PreparedPublishPackage>), AnyError>>,
>>();
let results = workspace_config
.members
.iter()
.cloned()
.map(|member| {
let import_map = import_map.clone();
async move {
let package = prepare_publish(
&member.config_file,
source_cache.clone(),
import_map.clone(),
diagnostics_collector,
)
.await
.with_context(|| {
format!("Failed preparing '{}'.", member.package_name)
})?;
Ok::<_, AnyError>((member.package_name, package))
}
.boxed()
})
.collect::<Vec<_>>();
let results = deno_core::futures::future::join_all(results).await;
for result in results {
let (package_name, package) = result??;
let (package_name, package) = result?;
prepared_package_by_name.insert(package_name, package);
}
Ok((publish_order_graph, prepared_package_by_name))
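
The rework above replaces spawned `JoinHandle`s with plain boxed futures driven by `join_all`, which is what lets every future borrow `diagnostics_collector` without a `'static` bound and drops one layer of `Result` (`result?` instead of `result??`). A minimal sketch of that pattern, assuming the `futures` crate (the real code uses the `deno_core::futures` re-export) and with `Collector`/`prepare_one` as hypothetical stand-ins:

```rust
use futures::future::{BoxFuture, FutureExt};

#[derive(Default)]
struct Collector {
    notes: std::sync::Mutex<Vec<String>>,
}

async fn prepare_one(name: &str, collector: &Collector) -> Result<String, String> {
    collector.notes.lock().unwrap().push(format!("prepared {}", name));
    Ok(name.to_uppercase())
}

async fn prepare_all(
    names: &[&str],
    collector: &Collector,
) -> Result<Vec<String>, String> {
    // Not spawning keeps the futures on the current task, so they may borrow
    // `collector`; `.boxed()` only erases their types so one Vec can hold them.
    let futures: Vec<BoxFuture<'_, Result<String, String>>> = names
        .iter()
        .copied()
        .map(|name| async move { prepare_one(name, collector).await }.boxed())
        .collect();

    let mut prepared = Vec::new();
    // join_all polls everything concurrently and yields results in input
    // order; a single `?` suffices because there is no JoinHandle layer.
    for result in futures::future::join_all(futures).await {
        prepared.push(result?);
    }
    Ok(prepared)
}

fn main() {
    let collector = Collector::default();
    let packages =
        futures::executor::block_on(prepare_all(&["foo", "bar"], &collector)).unwrap();
    assert_eq!(packages, vec!["FOO".to_string(), "BAR".to_string()]);
    assert_eq!(collector.notes.lock().unwrap().len(), 2);
}
```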


@@ -15,6 +15,9 @@ use tar::Header;
use crate::util::import_map::ImportMapUnfurler;
use deno_config::glob::PathOrPatternSet;
use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector;
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
pub path: PathBuf,
@@ -32,6 +35,7 @@ pub struct PublishableTarball {
pub fn create_gzipped_tarball(
dir: &Path,
source_cache: &dyn deno_graph::ParsedSourceStore,
diagnostics_collector: &PublishDiagnosticsCollector,
unfurler: &ImportMapUnfurler,
exclude_patterns: &PathOrPatternSet,
) -> Result<PublishableTarball, AnyError> {
@@ -72,9 +76,11 @@ pub fn create_gzipped_tarball(
});
let content = match source_cache.get_parsed_source(&url) {
Some(parsed_source) => {
let (content, unfurl_diagnostics) =
unfurler.unfurl(&url, &parsed_source);
diagnostics.extend_from_slice(&unfurl_diagnostics);
let mut reporter = |diagnostic| {
diagnostics_collector
.push(PublishDiagnostic::ImportMapUnfurl(diagnostic));
};
let content = unfurler.unfurl(&url, &parsed_source, &mut reporter);
content.into_bytes()
}
None => data,
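
The tarball path now hands `unfurl` a reporter callback instead of getting a `Vec` of messages back, so the caller decides where diagnostics end up (the collector here, a plain `Vec` in the unit test further down). A minimal sketch of that `&mut dyn FnMut(..)` shape, with hypothetical names and a deliberately crude stand-in for the real analysis:

```rust
#[derive(Debug)]
enum UnfurlDiagnostic {
    UnanalyzableDynamicImport { specifier: String },
}

fn unfurl(
    source: &str,
    specifier: &str,
    diagnostic_reporter: &mut dyn FnMut(UnfurlDiagnostic),
) -> String {
    // Toy heuristic only: a dynamic import whose argument is not a plain
    // string literal cannot be rewritten, so it is reported instead.
    if source.contains("import(") && !source.contains("import(\"") {
        diagnostic_reporter(UnfurlDiagnostic::UnanalyzableDynamicImport {
            specifier: specifier.to_string(),
        });
    }
    source.to_string()
}

fn main() {
    // Caller A: collect into a Vec, as the unit test below does.
    let mut collected = Vec::new();
    let mut reporter = |d: UnfurlDiagnostic| collected.push(d);
    let _ = unfurl("await import(prefix + name);", "file:///mod.ts", &mut reporter);
    assert_eq!(collected.len(), 1);

    // Caller B: forward directly, standing in for the diagnostics collector.
    let mut print_reporter = |d: UnfurlDiagnostic| eprintln!("warning: {:?}", d);
    let _ = unfurl("await import(\"./a.ts\");", "file:///mod.ts", &mut print_reporter);
}
```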


@@ -3,6 +3,7 @@
use std::collections::HashSet;
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_core::serde_json;
use deno_core::ModuleSpecifier;
use deno_graph::DefaultModuleAnalyzer;
@@ -14,8 +15,6 @@ use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use import_map::ImportMap;
use crate::graph_util::format_range_with_colors;
pub fn import_map_deps(value: &serde_json::Value) -> HashSet<JsrDepPackageReq> {
let Some(obj) = value.as_object() else {
return Default::default();
@@ -69,6 +68,30 @@ fn values_to_set<'a>(
entries
}
#[derive(Debug, Clone)]
pub enum ImportMapUnfurlDiagnostic {
UnanalyzableDynamicImport {
specifier: ModuleSpecifier,
range: SourceRange,
},
}
impl ImportMapUnfurlDiagnostic {
pub fn code(&self) -> &'static str {
match self {
Self::UnanalyzableDynamicImport { .. } => "unanalyzable-dynamic-import",
}
}
pub fn message(&self) -> &'static str {
match self {
Self::UnanalyzableDynamicImport { .. } => {
"unable to analyze dynamic import"
}
}
}
}
pub struct ImportMapUnfurler<'a> {
import_map: &'a ImportMap,
}
@@ -82,8 +105,8 @@ impl<'a> ImportMapUnfurler<'a> {
&self,
url: &ModuleSpecifier,
parsed_source: &ParsedSource,
) -> (String, Vec<String>) {
let mut diagnostics = Vec::new();
diagnostic_reporter: &mut dyn FnMut(ImportMapUnfurlDiagnostic),
) -> String {
let mut text_changes = Vec::new();
let module_info = DefaultModuleAnalyzer::module_info(parsed_source);
let analyze_specifier =
@@ -117,14 +140,17 @@ impl<'a> ImportMapUnfurler<'a> {
);
if !success {
diagnostics.push(
format!("Dynamic import was not analyzable and won't use the import map once published.\n at {}",
format_range_with_colors(&deno_graph::Range {
specifier: url.clone(),
start: dep.range.start.clone(),
end: dep.range.end.clone(),
})
)
let start_pos =
parsed_source.text_info().line_start(dep.range.start.line)
+ dep.range.start.character;
let end_pos =
parsed_source.text_info().line_start(dep.range.end.line)
+ dep.range.end.character;
diagnostic_reporter(
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport {
specifier: url.to_owned(),
range: SourceRange::new(start_pos, end_pos),
},
);
}
}
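
The reported range is computed by turning a (line, character) position into an absolute position: the offset where the line starts plus the column inside that line. A small self-contained sketch of that arithmetic, using a hypothetical `LineIndex` helper rather than deno_ast's `text_info().line_start(..)`, and assuming ASCII input so a column equals a byte offset:

```rust
struct LineIndex {
    line_starts: Vec<usize>,
}

impl LineIndex {
    fn new(text: &str) -> Self {
        // Offset 0 starts line 0; every '\n' starts the next line.
        let mut line_starts = vec![0];
        for (i, b) in text.bytes().enumerate() {
            if b == b'\n' {
                line_starts.push(i + 1);
            }
        }
        Self { line_starts }
    }

    /// Byte offset of the first character of a 0-based line.
    fn line_start(&self, line: usize) -> usize {
        self.line_starts[line]
    }

    /// Absolute offset of a 0-based (line, character) pair.
    fn offset(&self, line: usize, character: usize) -> usize {
        self.line_start(line) + character
    }
}

fn main() {
    let text = "import a from \"./a.ts\";\nawait import(\"asd \" + asd);\n";
    let index = LineIndex::new(text);
    // The dynamic import sits on line 1 (0-based), column 6.
    let start = index.offset(1, 6);
    assert_eq!(&text[start..start + 6], "import");
}
```
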
@@ -160,7 +186,7 @@ impl<'a> ImportMapUnfurler<'a> {
parsed_source.text_info().text_str(),
text_changes,
);
(rewritten_text, diagnostics)
rewritten_text
}
}
@@ -311,10 +337,26 @@ const test6 = await import(`${expr}`);
"#;
let specifier = ModuleSpecifier::parse("file:///dev/mod.ts").unwrap();
let source = parse_ast(&specifier, source_code);
let (unfurled_source, d) = unfurler.unfurl(&specifier, &source);
let mut d = Vec::new();
let mut reporter = |diagnostic| d.push(diagnostic);
let unfurled_source = unfurler.unfurl(&specifier, &source, &mut reporter);
assert_eq!(d.len(), 2);
assert!(d[0].starts_with("Dynamic import was not analyzable and won't use the import map once published."));
assert!(d[1].starts_with("Dynamic import was not analyzable and won't use the import map once published."));
assert!(
matches!(
d[0],
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport { .. }
),
"{:?}",
d[0]
);
assert!(
matches!(
d[1],
ImportMapUnfurlDiagnostic::UnanalyzableDynamicImport { .. }
),
"{:?}",
d[1]
);
let expected_source = r#"import express from "npm:express@5";"
import foo from "./lib/foo.ts";
import bar from "./lib/bar.ts";