
feat(lsp): auto-imports with @deno-types directives (#26821)

Co-authored-by: David Sherret <dsherret@gmail.com>
Authored by Nayeem Rahman on 2024-11-15 14:40:32 +00:00, committed by GitHub
parent 032ae7fb19
commit 3f26310728
5 changed files with 338 additions and 78 deletions
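
In practice, when an auto-import completion resolves to an npm package whose type declarations live in a separate @types package, the language server now prepends a @deno-types directive to the inserted import. A minimal sketch of the resulting TypeScript, using the package names exercised by the new test at the end of this diff:

// The additionalTextEdit produced when resolving the "compressToBase64"
// completion inserts the directive together with the import itself.
// @deno-types="@types/lz-string"
import { compressToBase64 } from "lz-string";

compressToBase64("some text to compress");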

View file

@@ -5,6 +5,7 @@ use super::cache::LspCache;
 use super::config::Config;
 use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
+use super::resolver::ScopeDepInfo;
 use super::resolver::SingleReferrerGraphResolver;
 use super::testing::TestCollector;
 use super::testing::TestModule;
@@ -38,7 +39,6 @@ use indexmap::IndexSet;
 use node_resolver::NodeModuleKind;
 use std::borrow::Cow;
 use std::collections::BTreeMap;
-use std::collections::BTreeSet;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fs;
@@ -989,12 +989,7 @@ pub struct Documents {
   open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
   /// Documents stored on the file system.
   file_system_docs: Arc<FileSystemDocuments>,
-  /// The npm package requirements found in npm specifiers.
-  npm_reqs_by_scope:
-    Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
-  /// Config scopes that contain a node: specifier such that a @types/node
-  /// package should be injected.
-  scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
+  dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>,
 }

 impl Documents {
@@ -1157,17 +1152,20 @@ impl Documents {
     false
   }

-  pub fn npm_reqs_by_scope(
+  pub fn dep_info_by_scope(
     &mut self,
-  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> {
-    self.calculate_npm_reqs_if_dirty();
-    self.npm_reqs_by_scope.clone()
+  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> {
+    self.calculate_dep_info_if_dirty();
+    self.dep_info_by_scope.clone()
   }

-  pub fn scopes_with_node_specifier(
-    &self,
-  ) -> &Arc<HashSet<Option<ModuleSpecifier>>> {
-    &self.scopes_with_node_specifier
+  pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> {
+    self
+      .dep_info_by_scope
+      .iter()
+      .filter(|(_, i)| i.has_node_specifier)
+      .map(|(s, _)| s.clone())
+      .collect::<HashSet<_>>()
   }

   /// Return a document for the specifier.
@@ -1410,34 +1408,46 @@ impl Documents {
   /// Iterate through the documents, building a map where the key is a unique
   /// document and the value is a set of specifiers that depend on that
   /// document.
-  fn calculate_npm_reqs_if_dirty(&mut self) {
-    let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default();
-    let mut scopes_with_specifier = HashSet::new();
+  fn calculate_dep_info_if_dirty(&mut self) {
+    let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default();
     let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
     if !is_fs_docs_dirty && !self.dirty {
       return;
     }
     let mut visit_doc = |doc: &Arc<Document>| {
       let scope = doc.scope();
-      let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default();
+      let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default();
       for dependency in doc.dependencies().values() {
-        if let Some(dep) = dependency.get_code() {
+        let code_specifier = dependency.get_code();
+        let type_specifier = dependency.get_type();
+        if let Some(dep) = code_specifier {
           if dep.scheme() == "node" {
-            scopes_with_specifier.insert(scope.cloned());
+            dep_info.has_node_specifier = true;
           }
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
           }
         }
-        if let Some(dep) = dependency.get_type() {
+        if let Some(dep) = type_specifier {
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
+          }
+        }
+        if dependency.maybe_deno_types_specifier.is_some() {
+          if let (Some(code_specifier), Some(type_specifier)) =
+            (code_specifier, type_specifier)
+          {
+            if MediaType::from_specifier(type_specifier).is_declaration() {
+              dep_info
+                .deno_types_to_code_resolutions
+                .insert(type_specifier.clone(), code_specifier.clone());
+            }
           }
         }
       }
       if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
         if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-          reqs.insert(reference.into_inner().req);
+          dep_info.npm_reqs.insert(reference.into_inner().req);
         }
       }
     };
@@ -1448,14 +1458,49 @@ impl Documents {
       visit_doc(doc);
     }

-    // fill the reqs from the lockfile
     for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
+      let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default();
+      (|| {
+        let config_file = config_data.maybe_deno_json()?;
+        let jsx_config =
+          config_file.to_maybe_jsx_import_source_config().ok()??;
+        let type_specifier = jsx_config.default_types_specifier.as_ref()?;
+        let code_specifier = jsx_config.default_specifier.as_ref()?;
+        let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
+        let range = deno_graph::Range {
+          specifier: jsx_config.base_url.clone(),
+          start: deno_graph::Position::zeroed(),
+          end: deno_graph::Position::zeroed(),
+        };
+        let type_specifier = cli_resolver
+          .resolve(
+            type_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Types,
+          )
+          .ok()?;
+        let code_specifier = cli_resolver
+          .resolve(
+            code_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Execution,
+          )
+          .ok()?;
+        dep_info
+          .deno_types_to_code_resolutions
+          .insert(type_specifier, code_specifier);
+        Some(())
+      })();
+      // fill the reqs from the lockfile
       if let Some(lockfile) = config_data.lockfile.as_ref() {
-        let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
         let lockfile = lockfile.lock();
         for dep_req in lockfile.content.packages.specifiers.keys() {
           if dep_req.kind == deno_semver::package::PackageKind::Npm {
-            reqs.insert(dep_req.req.clone());
+            dep_info.npm_reqs.insert(dep_req.req.clone());
           }
         }
       }
@@ -1464,15 +1509,22 @@ impl Documents {
     // Ensure a @types/node package exists when any module uses a node: specifier.
     // Unlike on the command line, here we just add @types/node to the npm package
     // requirements since this won't end up in the lockfile.
-    for scope in &scopes_with_specifier {
-      let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default();
-      if !reqs.iter().any(|r| r.name == "@types/node") {
-        reqs.insert(PackageReq::from_str("@types/node").unwrap());
+    for dep_info in dep_info_by_scope.values_mut() {
+      if dep_info.has_node_specifier
+        && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node")
+      {
+        dep_info
+          .npm_reqs
+          .insert(PackageReq::from_str("@types/node").unwrap());
       }
     }

-    self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope);
-    self.scopes_with_node_specifier = Arc::new(scopes_with_specifier);
+    self.dep_info_by_scope = Arc::new(
+      dep_info_by_scope
+        .into_iter()
+        .map(|(s, i)| (s, Arc::new(i)))
+        .collect(),
+    );
     self.dirty = false;
   }

View file

@@ -1036,7 +1036,7 @@ impl Inner {
     // refresh the npm specifiers because it might have discovered
     // a @types/node package and now's a good time to do that anyway
-    self.refresh_npm_specifiers().await;
+    self.refresh_dep_info().await;
     self.project_changed([], true);
   }
@@ -1082,7 +1082,7 @@ impl Inner {
     );
     if document.is_diagnosable() {
       self.project_changed([(document.specifier(), ChangeKind::Opened)], false);
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
       self.diagnostics_server.invalidate(&[specifier]);
       self.send_diagnostics_update();
       self.send_testing_update();
@@ -1103,8 +1103,8 @@ impl Inner {
       Ok(document) => {
         if document.is_diagnosable() {
           let old_scopes_with_node_specifier =
-            self.documents.scopes_with_node_specifier().clone();
-          self.refresh_npm_specifiers().await;
+            self.documents.scopes_with_node_specifier();
+          self.refresh_dep_info().await;
           let mut config_changed = false;
           if !self
             .documents
@@ -1155,13 +1155,15 @@ impl Inner {
     }));
   }

-  async fn refresh_npm_specifiers(&mut self) {
-    let package_reqs = self.documents.npm_reqs_by_scope();
+  async fn refresh_dep_info(&mut self) {
+    let dep_info_by_scope = self.documents.dep_info_by_scope();
     let resolver = self.resolver.clone();
     // spawn due to the lsp's `Send` requirement
-    spawn(async move { resolver.set_npm_reqs(&package_reqs).await })
-      .await
-      .ok();
+    spawn(
+      async move { resolver.set_dep_info_by_scope(&dep_info_by_scope).await },
+    )
+    .await
+    .ok();
   }

   async fn did_close(&mut self, params: DidCloseTextDocumentParams) {
@@ -1180,7 +1182,7 @@ impl Inner {
       .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
     self.diagnostics_state.clear(&specifier);
     if self.is_diagnosable(&specifier) {
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
       self.diagnostics_server.invalidate(&[specifier.clone()]);
       self.send_diagnostics_update();
       self.send_testing_update();
@@ -3600,15 +3602,16 @@ impl Inner {
     if byonm {
       roots.retain(|s| s.scheme() != "npm");
-    } else if let Some(npm_reqs) = self
+    } else if let Some(dep_info) = self
       .documents
-      .npm_reqs_by_scope()
+      .dep_info_by_scope()
       .get(&config_data.map(|d| d.scope.as_ref().clone()))
     {
       // always include the npm packages since resolution of one npm package
       // might affect the resolution of other npm packages
       roots.extend(
-        npm_reqs
+        dep_info
+          .npm_reqs
           .iter()
           .map(|req| ModuleSpecifier::parse(&format!("npm:{}", req)).unwrap()),
       );
@@ -3686,7 +3689,7 @@ impl Inner {
   async fn post_cache(&mut self) {
     self.resolver.did_cache();
-    self.refresh_npm_specifiers().await;
+    self.refresh_dep_info().await;
     self.diagnostics_server.invalidate_all();
     self.project_changed([], true);
     self.ts_server.cleanup_semantic_cache(self.snapshot()).await;

View file

@@ -7,6 +7,7 @@ use deno_cache_dir::HttpCache;
 use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
+use deno_core::parking_lot::Mutex;
 use deno_core::url::Url;
 use deno_graph::source::ResolutionMode;
 use deno_graph::GraphImport;
@@ -84,6 +85,7 @@ struct LspScopeResolver {
   pkg_json_resolver: Arc<PackageJsonResolver>,
   redirect_resolver: Option<Arc<RedirectResolver>>,
   graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
+  dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
   package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
   config_data: Option<Arc<ConfigData>>,
 }
@@ -101,6 +103,7 @@ impl Default for LspScopeResolver {
       pkg_json_resolver: factory.pkg_json_resolver().clone(),
       redirect_resolver: None,
       graph_imports: Default::default(),
+      dep_info: Default::default(),
       package_json_deps_by_resolution: Default::default(),
       config_data: None,
     }
@@ -180,6 +183,15 @@ impl LspScopeResolver {
               NodeModuleKind::Esm,
               NodeResolutionMode::Types,
             )
+            .or_else(|_| {
+              npm_pkg_req_resolver.resolve_req_reference(
+                &req_ref,
+                &referrer,
+                // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+                NodeModuleKind::Esm,
+                NodeResolutionMode::Execution,
+              )
+            })
             .ok()?,
           ))
           .0;
@@ -200,6 +212,7 @@ impl LspScopeResolver {
       pkg_json_resolver,
       redirect_resolver,
       graph_imports,
+      dep_info: Default::default(),
       package_json_deps_by_resolution,
       config_data: config_data.cloned(),
     }
@@ -222,6 +235,7 @@ impl LspScopeResolver {
       redirect_resolver: self.redirect_resolver.clone(),
       pkg_json_resolver: factory.pkg_json_resolver().clone(),
       graph_imports: self.graph_imports.clone(),
+      dep_info: self.dep_info.clone(),
       package_json_deps_by_resolution: self
         .package_json_deps_by_resolution
         .clone(),
@@ -288,19 +302,24 @@ impl LspResolver {
     }
   }

-  pub async fn set_npm_reqs(
+  pub async fn set_dep_info_by_scope(
     &self,
-    reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>,
+    dep_info_by_scope: &Arc<
+      BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>,
+    >,
   ) {
     for (scope, resolver) in [(None, &self.unscoped)]
       .into_iter()
       .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
     {
+      let dep_info = dep_info_by_scope.get(&scope.cloned());
+      if let Some(dep_info) = dep_info {
+        *resolver.dep_info.lock() = dep_info.clone();
+      }
       if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
         if let Some(npm_resolver) = npm_resolver.as_managed() {
-          let reqs = reqs
-            .get(&scope.cloned())
-            .map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
+          let reqs = dep_info
+            .map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
             .unwrap_or_default();
           if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
             lsp_warn!("Could not set npm package requirements: {:#}", err);
@@ -434,6 +453,19 @@ impl LspResolver {
       .cloned()
   }

+  pub fn deno_types_to_code_resolution(
+    &self,
+    specifier: &ModuleSpecifier,
+    file_referrer: Option<&ModuleSpecifier>,
+  ) -> Option<ModuleSpecifier> {
+    let resolver = self.get_scope_resolver(file_referrer);
+    let dep_info = resolver.dep_info.lock().clone();
+    dep_info
+      .deno_types_to_code_resolutions
+      .get(specifier)
+      .cloned()
+  }
+
   pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
     fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
       // consider any /node_modules/ directory as being in the node_modules
@@ -538,6 +570,13 @@ impl LspResolver {
   }
 }

+#[derive(Debug, Default, Clone)]
+pub struct ScopeDepInfo {
+  pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
+  pub npm_reqs: BTreeSet<PackageReq>,
+  pub has_node_specifier: bool,
+}
+
 #[derive(Default)]
 struct ResolverFactoryServices {
   cli_resolver: Deferred<Arc<CliResolver>>,

View file

@@ -3417,9 +3417,18 @@ fn parse_code_actions(
       additional_text_edits.extend(change.text_changes.iter().map(|tc| {
         let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
         if let Some(specifier_rewrite) = &data.specifier_rewrite {
-          text_edit.new_text = text_edit
-            .new_text
-            .replace(&specifier_rewrite.0, &specifier_rewrite.1);
+          text_edit.new_text = text_edit.new_text.replace(
+            &specifier_rewrite.old_specifier,
+            &specifier_rewrite.new_specifier,
+          );
+          if let Some(deno_types_specifier) =
+            &specifier_rewrite.new_deno_types_specifier
+          {
+            text_edit.new_text = format!(
+              "// @deno-types=\"{}\"\n{}",
+              deno_types_specifier, &text_edit.new_text
+            );
+          }
         }
         text_edit
       }));
@@ -3578,17 +3587,23 @@ impl CompletionEntryDetails {
       let mut text_edit = original_item.text_edit.clone();
       if let Some(specifier_rewrite) = &data.specifier_rewrite {
         if let Some(text_edit) = &mut text_edit {
-          match text_edit {
-            lsp::CompletionTextEdit::Edit(text_edit) => {
-              text_edit.new_text = text_edit
-                .new_text
-                .replace(&specifier_rewrite.0, &specifier_rewrite.1);
-            }
+          let new_text = match text_edit {
+            lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text,
             lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => {
-              insert_replace_edit.new_text = insert_replace_edit
-                .new_text
-                .replace(&specifier_rewrite.0, &specifier_rewrite.1);
+              &mut insert_replace_edit.new_text
             }
-          }
+          };
+          *new_text = new_text.replace(
+            &specifier_rewrite.old_specifier,
+            &specifier_rewrite.new_specifier,
+          );
+          if let Some(deno_types_specifier) =
+            &specifier_rewrite.new_deno_types_specifier
+          {
+            *new_text = format!(
+              "// @deno-types=\"{}\"\n{}",
+              deno_types_specifier, new_text
+            );
+          }
         }
       }
@@ -3693,6 +3708,13 @@ impl CompletionInfo {
   }
 }

+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct CompletionSpecifierRewrite {
+  old_specifier: String,
+  new_specifier: String,
+  new_deno_types_specifier: Option<String>,
+}
+
 #[derive(Debug, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct CompletionItemData {
@@ -3705,7 +3727,7 @@ pub struct CompletionItemData {
   /// be rewritten by replacing the first string with the second. Intended for
   /// auto-import specifiers to be reverse-import-mapped.
   #[serde(skip_serializing_if = "Option::is_none")]
-  pub specifier_rewrite: Option<(String, String)>,
+  pub specifier_rewrite: Option<CompletionSpecifierRewrite>,
   #[serde(skip_serializing_if = "Option::is_none")]
   pub data: Option<Value>,
   pub use_code_snippet: bool,
@@ -3927,20 +3949,40 @@ impl CompletionEntry {
     if let Some(source) = &self.source {
       let mut display_source = source.clone();
       if let Some(import_data) = &self.auto_import_data {
-        if let Some(new_module_specifier) = language_server
-          .get_ts_response_import_mapper(specifier)
+        let import_mapper =
+          language_server.get_ts_response_import_mapper(specifier);
+        if let Some(mut new_specifier) = import_mapper
           .check_specifier(&import_data.normalized, specifier)
           .or_else(|| relative_specifier(specifier, &import_data.normalized))
         {
-          if new_module_specifier.contains("/node_modules/") {
+          if new_specifier.contains("/node_modules/") {
             return None;
           }
-          display_source.clone_from(&new_module_specifier);
-          if new_module_specifier != import_data.raw.module_specifier {
-            specifier_rewrite = Some((
-              import_data.raw.module_specifier.clone(),
-              new_module_specifier,
-            ));
+          let mut new_deno_types_specifier = None;
+          if let Some(code_specifier) = language_server
+            .resolver
+            .deno_types_to_code_resolution(
+              &import_data.normalized,
+              Some(specifier),
+            )
+            .and_then(|s| {
+              import_mapper
+                .check_specifier(&s, specifier)
+                .or_else(|| relative_specifier(specifier, &s))
+            })
+          {
+            new_deno_types_specifier =
+              Some(std::mem::replace(&mut new_specifier, code_specifier));
+          }
+          display_source.clone_from(&new_specifier);
+          if new_specifier != import_data.raw.module_specifier
+            || new_deno_types_specifier.is_some()
+          {
+            specifier_rewrite = Some(CompletionSpecifierRewrite {
+              old_specifier: import_data.raw.module_specifier.clone(),
+              new_specifier,
+              new_deno_types_specifier,
+            });
           }
         } else if source.starts_with(jsr_url().as_str()) {
           return None;
@@ -4246,9 +4288,7 @@ impl TscSpecifierMap {
       return specifier.to_string();
     }
     let mut specifier = original.to_string();
-    if specifier.contains("/node_modules/.deno/")
-      && !specifier.contains("/node_modules/@types/node/")
-    {
+    if !specifier.contains("/node_modules/@types/node/") {
       // The ts server doesn't give completions from files in
       // `node_modules/.deno/`. We work around it like this.
       specifier = specifier.replace("/node_modules/", "/$node_modules/");
@@ -4415,6 +4455,8 @@ fn op_load<'s>(
     })
   };

+  lsp_warn!("op_load {} {}", &specifier, maybe_load_response.is_some());
+
   let serialized = serde_v8::to_v8(scope, maybe_load_response)?;

   state.performance.measure(mark);

View file

@@ -8270,6 +8270,130 @@ fn lsp_npm_auto_import_and_quick_fix_byonm() {
client.shutdown();
}

#[test]
fn lsp_npm_auto_import_with_deno_types() {
let context = TestContextBuilder::new()
.use_http_server()
.use_temp_cwd()
.add_npm_env_vars()
.build();
let temp_dir = context.temp_dir();
temp_dir.write(
"deno.json",
json!({
"compilerOptions": {
"jsx": "react-jsx",
"jsxImportSource": "react",
"jsxImportSourceTypes": "@types/react",
},
})
.to_string(),
);
temp_dir.write(
"package.json",
json!({
"dependencies": {
"react": "*",
"@types/react": "*",
"lz-string": "1.3",
"@types/lz-string": "1.3",
},
})
.to_string(),
);
context.run_npm("install");
temp_dir.write(
"other.ts",
r#"
// @deno-types="@types/lz-string"
import "lz-string";
"#,
);
let mut client = context.new_lsp_command().build();
client.initialize_default();
client.did_open(json!({
"textDocument": {
"uri": temp_dir.url().join("file.ts").unwrap(),
"languageId": "typescript",
"version": 1,
"text": r#"
compressToBase64();
createRef();
"#,
},
}));
let list = client.get_completion_list(
temp_dir.url().join("file.ts").unwrap(),
(1, 24),
json!({ "triggerKind": 1 }),
);
let item = list
.items
.iter()
.find(|item| item.label == "compressToBase64")
.unwrap();
let res = client.write_request("completionItem/resolve", item);
assert_eq!(
res,
json!({
"label": "compressToBase64",
"labelDetails": {
"description": "lz-string",
},
"kind": 2,
"detail": "(method) LZString.LZStringStatic.compressToBase64(uncompressed: string): string",
"documentation": {
"kind": "markdown",
"value": "Compresses input string producing an instance of a ASCII UTF-16 string,\nwhich represents the original string encoded in Base64.\nThe result can be safely transported outside the browser with a\nguarantee that none of the characters produced need to be URL-encoded.\n\n*@param* - uncompressed A string which should be compressed.",
},
"sortText": "￿16_0",
"additionalTextEdits": [
{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 0 },
},
"newText": "// @deno-types=\"@types/lz-string\"\nimport { compressToBase64 } from \"lz-string\";\n",
},
],
}),
);
let list = client.get_completion_list(
temp_dir.url().join("file.ts").unwrap(),
(2, 17),
json!({ "triggerKind": 1 }),
);
let item = list
.items
.iter()
.find(|item| item.label == "createRef")
.unwrap();
let res = client.write_request("completionItem/resolve", item);
assert_eq!(
res,
json!({
"label": "createRef",
"labelDetails": {
"description": "react",
},
"kind": 3,
"detail": "function React.createRef<T>(): React.RefObject<T>",
"documentation": { "kind": "markdown", "value": "" },
"sortText": "￿16_0",
"additionalTextEdits": [
{
"range": {
"start": { "line": 0, "character": 0 },
"end": { "line": 0, "character": 0 },
},
"newText": "// @deno-types=\"@types/react\"\nimport { createRef } from \"react\";\n",
},
],
}),
);
client.shutdown();
}

#[test]
fn lsp_completions_node_specifier() {
let context = TestContextBuilder::new().use_temp_cwd().build();