Mirror of https://github.com/denoland/deno.git (synced 2024-11-24 15:19:26 -05:00)
refactor(lsp): cleanup documents dependents calculation (#23295)

commit 7c5745a204 (parent 08f46ac446)
3 changed files with 42 additions and 139 deletions
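In short, this change drops the reverse-dependency ("dependents") bookkeeping from the LSP document store: the recurse_dependents helper, the dependents_map field, the public dependents() method, and the extra recalculation on every update are removed; calculate_dependents_if_dirty is renamed to calculate_npm_reqs_if_dirty and rewritten around a small visit_doc closure; and the language server now invalidates diagnostics only for the specifier that actually changed. Judging from the code in the hunks, the three changed files are cli/lsp/documents.rs, cli/lsp/language_server.rs, and the LSP integration tests.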
@@ -49,7 +49,6 @@ use std::borrow::Cow;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
 use std::collections::HashSet;
-use std::collections::VecDeque;
 use std::fs;
 use std::ops::Range;
 use std::str::FromStr;
@@ -643,26 +642,6 @@ pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range {
   }
 }
 
-/// Recurse and collect specifiers that appear in the dependent map.
-fn recurse_dependents(
-  specifier: &ModuleSpecifier,
-  map: &HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
-) -> Vec<ModuleSpecifier> {
-  let mut dependents = HashSet::new();
-  let mut pending = VecDeque::new();
-  pending.push_front(specifier);
-  while let Some(specifier) = pending.pop_front() {
-    if let Some(deps) = map.get(specifier) {
-      for dep in deps {
-        if dependents.insert(dep) {
-          pending.push_front(dep);
-        }
-      }
-    }
-  }
-  dependents.into_iter().cloned().collect()
-}
-
 #[derive(Debug)]
 struct RedirectResolver {
   cache: Arc<dyn HttpCache>,
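The deleted recurse_dependents above is a worklist traversal over a reverse-dependency map: starting from one specifier, it collects everything that transitively depends on it, using the dependents set both as the result and as the visited set. A minimal standalone sketch of the same traversal (not Deno code; it uses plain String keys and a hard-coded map instead of ModuleSpecifier and the LSP document graph):

// Standalone sketch of the removed traversal, with String keys.
use std::collections::{HashMap, HashSet, VecDeque};

/// Collect every key that transitively depends on `specifier`,
/// given a map of specifier -> set of direct dependents.
fn recurse_dependents(
  specifier: &str,
  map: &HashMap<String, HashSet<String>>,
) -> Vec<String> {
  let mut dependents = HashSet::new();
  let mut pending = VecDeque::new();
  pending.push_front(specifier.to_string());
  while let Some(specifier) = pending.pop_front() {
    if let Some(deps) = map.get(&specifier) {
      for dep in deps {
        // only enqueue a dependent the first time we see it
        if dependents.insert(dep.clone()) {
          pending.push_front(dep.clone());
        }
      }
    }
  }
  dependents.into_iter().collect()
}

fn main() {
  // b depends on a, c depends on b => changing a affects b and c
  let mut map: HashMap<String, HashSet<String>> = HashMap::new();
  map.entry("a".into()).or_default().insert("b".into());
  map.entry("b".into()).or_default().insert("c".into());
  let mut result = recurse_dependents("a", &map);
  result.sort();
  assert_eq!(result, vec!["b".to_string(), "c".to_string()]);
  println!("{result:?}");
}

Because each dependent is enqueued at most once, the walk terminates even if the dependency graph contains cycles.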
@@ -851,9 +830,6 @@ pub struct Documents {
   /// A flag that indicates that stated data is potentially invalid and needs to
   /// be recalculated before being considered valid.
   dirty: bool,
-  /// A map where the key is a specifier and the value is a set of specifiers
-  /// that depend on the key.
-  dependents_map: Arc<HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>>,
   /// A map of documents that are "open" in the language server.
   open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
   /// Documents stored on the file system.
@@ -883,7 +859,6 @@ impl Documents {
     Self {
       cache: cache.clone(),
       dirty: true,
-      dependents_map: Default::default(),
      open_docs: HashMap::default(),
       file_system_docs: Default::default(),
       imports: Default::default(),
@@ -1115,24 +1090,9 @@ impl Documents {
     false
   }
 
-  /// Return an array of specifiers, if any, that are dependent upon the
-  /// supplied specifier. This is used to determine invalidation of diagnostics
-  /// when a module has been changed.
-  pub fn dependents(
-    &mut self,
-    specifier: &ModuleSpecifier,
-  ) -> Vec<ModuleSpecifier> {
-    self.calculate_dependents_if_dirty();
-    if let Some(specifier) = self.resolve_specifier(specifier) {
-      recurse_dependents(&specifier, &self.dependents_map)
-    } else {
-      vec![]
-    }
-  }
-
   /// Returns a collection of npm package requirements.
   pub fn npm_package_reqs(&mut self) -> Arc<Vec<PackageReq>> {
-    self.calculate_dependents_if_dirty();
+    self.calculate_npm_reqs_if_dirty();
     self.npm_specifier_reqs.clone()
   }
 
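Of the pieces removed in this file, only the dependents() method above was public; its callers in the language server are updated in the hunks further down.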
@@ -1439,93 +1399,46 @@ impl Documents {
       fs_docs.set_dirty(true);
     }
     self.dirty = true;
-    self.calculate_dependents_if_dirty();
   }
 
   /// Iterate through the documents, building a map where the key is a unique
   /// document and the value is a set of specifiers that depend on that
   /// document.
-  fn calculate_dependents_if_dirty(&mut self) {
-    #[derive(Default)]
-    struct DocAnalyzer {
-      dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
-      analyzed_specifiers: HashSet<ModuleSpecifier>,
-      pending_specifiers: VecDeque<ModuleSpecifier>,
-      npm_reqs: HashSet<PackageReq>,
-      has_node_builtin_specifier: bool,
-    }
-
-    impl DocAnalyzer {
-      fn add(&mut self, dep: &ModuleSpecifier, specifier: &ModuleSpecifier) {
-        if !self.analyzed_specifiers.contains(dep) {
-          self.analyzed_specifiers.insert(dep.clone());
-          // perf: ensure this is not added to unless this specifier has never
-          // been analyzed in order to not cause an extra file system lookup
-          self.pending_specifiers.push_back(dep.clone());
-          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            self.npm_reqs.insert(reference.into_inner().req);
-          }
-        }
-
-        self
-          .dependents_map
-          .entry(dep.clone())
-          .or_default()
-          .insert(specifier.clone());
-      }
-
-      fn analyze_doc(&mut self, specifier: &ModuleSpecifier, doc: &Document) {
-        self.analyzed_specifiers.insert(specifier.clone());
-        for dependency in doc.dependencies().values() {
-          if let Some(dep) = dependency.get_code() {
-            if !self.has_node_builtin_specifier && dep.scheme() == "node" {
-              self.has_node_builtin_specifier = true;
-            }
-            self.add(dep, specifier);
-          }
-          if let Some(dep) = dependency.get_type() {
-            self.add(dep, specifier);
-          }
-        }
-        if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
-          self.add(dep, specifier);
-        }
-      }
-    }
-
+  fn calculate_npm_reqs_if_dirty(&mut self) {
+    let mut npm_reqs = HashSet::new();
+    let mut has_node_builtin_specifier = false;
     let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
-
     if !is_fs_docs_dirty && !self.dirty {
       return;
     }
-
-    let mut doc_analyzer = DocAnalyzer::default();
-    // favor documents that are open in case a document exists in both collections
-    for entry in self.file_system_docs.docs.iter() {
-      let specifier = entry.key();
-      let doc = entry.value();
-      doc_analyzer.analyze_doc(specifier, doc);
-    }
-    for (specifier, doc) in self.open_docs.iter() {
-      doc_analyzer.analyze_doc(specifier, doc);
-    }
-
-    let resolver = self.get_resolver();
-    let npm_resolver = self.get_npm_resolver();
-    while let Some(specifier) = doc_analyzer.pending_specifiers.pop_front() {
-      if let Some(doc) = self.open_docs.get(&specifier) {
-        doc_analyzer.analyze_doc(&specifier, doc);
-      } else if let Some(doc) = self.file_system_docs.get(
-        &self.cache,
-        resolver,
-        &specifier,
-        npm_resolver,
-      ) {
-        doc_analyzer.analyze_doc(&specifier, &doc);
+    let mut visit_doc = |doc: &Arc<Document>| {
+      for dependency in doc.dependencies().values() {
+        if let Some(dep) = dependency.get_code() {
+          if dep.scheme() == "node" {
+            has_node_builtin_specifier = true;
+          }
+          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+            npm_reqs.insert(reference.into_inner().req);
+          }
+        }
+        if let Some(dep) = dependency.get_type() {
+          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+            npm_reqs.insert(reference.into_inner().req);
+          }
+        }
       }
+      if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
+        if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+          npm_reqs.insert(reference.into_inner().req);
+        }
+      }
+    };
+    for entry in self.file_system_docs.docs.iter() {
+      visit_doc(entry.value())
+    }
+    for doc in self.open_docs.values() {
+      visit_doc(doc);
     }
 
-    let mut npm_reqs = doc_analyzer.npm_reqs;
-
     // fill the reqs from the lockfile
     if let Some(lockfile) = self.lockfile.as_ref() {
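The rewrite above replaces the local DocAnalyzer struct, which threaded several accumulators and a pending-work queue through the traversal, with a visit_doc closure that mutably captures only the two values still needed (npm_reqs and has_node_builtin_specifier) and is called once per file-system and open document. A minimal standalone sketch of that pattern, with a made-up Doc type and string-prefix checks standing in for Deno's Document, ModuleSpecifier, and NpmPackageReqReference parsing:

// Standalone sketch (not Deno code): replacing a stateful "analyzer" struct
// with a closure that captures the accumulators it needs.
use std::collections::HashSet;

struct Doc {
  deps: Vec<String>,
}

fn main() {
  let docs = vec![
    Doc { deps: vec!["npm:chalk@5".into(), "node:fs".into()] },
    Doc { deps: vec!["./other.ts".into()] },
  ];

  let mut npm_reqs = HashSet::new();
  let mut has_node_builtin_specifier = false;

  // The closure mutably borrows the accumulators above, so no struct is
  // needed just to thread that state through the traversal.
  let mut visit_doc = |doc: &Doc| {
    for dep in &doc.deps {
      if dep.starts_with("node:") {
        has_node_builtin_specifier = true;
      }
      if let Some(req) = dep.strip_prefix("npm:") {
        npm_reqs.insert(req.to_string());
      }
    }
  };

  for doc in &docs {
    visit_doc(doc);
  }
  drop(visit_doc); // make the end of the mutable borrows explicit

  assert!(has_node_builtin_specifier);
  assert!(npm_reqs.contains("chalk@5"));
  println!("npm reqs: {npm_reqs:?}");
}

The explicit drop is only there for clarity; with non-lexical lifetimes the closure's borrows already end after its last call.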
@@ -1542,14 +1455,12 @@ impl Documents {
     // Ensure a @types/node package exists when any module uses a node: specifier.
     // Unlike on the command line, here we just add @types/node to the npm package
     // requirements since this won't end up in the lockfile.
-    self.has_injected_types_node_package = doc_analyzer
-      .has_node_builtin_specifier
+    self.has_injected_types_node_package = has_node_builtin_specifier
       && !npm_reqs.iter().any(|r| r.name == "@types/node");
     if self.has_injected_types_node_package {
       npm_reqs.insert(PackageReq::from_str("@types/node").unwrap());
     }
 
-    self.dependents_map = Arc::new(doc_analyzer.dependents_map);
     self.npm_specifier_reqs = Arc::new({
       let mut reqs = npm_reqs.into_iter().collect::<Vec<_>>();
       reqs.sort();
@@ -1235,9 +1235,7 @@ impl Inner {
       Ok(document) => {
         if document.is_diagnosable() {
           self.refresh_npm_specifiers().await;
-          self
-            .diagnostics_server
-            .invalidate(&self.documents.dependents(&specifier));
+          self.diagnostics_server.invalidate(&[specifier]);
           self.send_diagnostics_update();
           self.send_testing_update();
         }
@@ -1279,9 +1277,7 @@ impl Inner {
       .normalize_url(&params.text_document.uri, LspUrlKind::File);
     if self.is_diagnosable(&specifier) {
       self.refresh_npm_specifiers().await;
-      let mut specifiers = self.documents.dependents(&specifier);
-      specifiers.push(specifier.clone());
-      self.diagnostics_server.invalidate(&specifiers);
+      self.diagnostics_server.invalidate(&[specifier.clone()]);
       self.send_diagnostics_update();
       self.send_testing_update();
     }
@@ -3181,8 +3177,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
     let document = inner.did_open(&specifier, params);
     if document.is_diagnosable() {
       inner.refresh_npm_specifiers().await;
-      let specifiers = inner.documents.dependents(&specifier);
-      inner.diagnostics_server.invalidate(&specifiers);
+      inner.diagnostics_server.invalidate(&[specifier]);
       inner.send_diagnostics_update();
       inner.send_testing_update();
     }
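Net effect in the language server: each of these handlers used to invalidate diagnostics for the changed document plus everything returned by documents.dependents(), and now invalidates only the changed specifier itself; the refresh_npm_specifiers().await call stays in place.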
@@ -7862,18 +7862,18 @@ fn lsp_npm_specifier_unopened_file() {
     .use_http_server()
     .use_temp_cwd()
     .build();
-  let mut client = context.new_lsp_command().build();
-  client.initialize_default();
-
+  let temp_dir = context.temp_dir();
   // create other.ts, which re-exports an npm specifier
-  client.deno_dir().write(
+  temp_dir.write(
     "other.ts",
     "export { default as chalk } from 'npm:chalk@5';",
   );
+  let mut client = context.new_lsp_command().build();
+  client.initialize_default();
 
   // cache the other.ts file to the DENO_DIR
   let deno = deno_cmd_with_deno_dir(client.deno_dir())
-    .current_dir(client.deno_dir().path())
+    .current_dir(temp_dir.path())
     .arg("cache")
     .arg("--quiet")
     .arg("other.ts")
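The test setup now writes other.ts into the temp working directory rather than into the DENO_DIR, starts the LSP client only after the file exists, and (in the hunks below) builds the main.ts URI from temp_dir.uri() instead of a file path under the DENO_DIR.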
@@ -7891,12 +7891,9 @@ fn lsp_npm_specifier_unopened_file() {
   assert!(stderr.is_empty());
 
   // open main.ts, which imports other.ts (unopened)
-  let main_url =
-    ModuleSpecifier::from_file_path(client.deno_dir().path().join("main.ts"))
-      .unwrap();
   client.did_open(json!({
     "textDocument": {
-      "uri": main_url,
+      "uri": temp_dir.uri().join("main.ts").unwrap(),
       "languageId": "typescript",
       "version": 1,
       "text": "import { chalk } from './other.ts';\n\n",
@@ -7907,7 +7904,7 @@ fn lsp_npm_specifier_unopened_file() {
     "textDocument/didChange",
     json!({
       "textDocument": {
-        "uri": main_url,
+        "uri": temp_dir.uri().join("main.ts").unwrap(),
        "version": 2
       },
       "contentChanges": [
@@ -7925,7 +7922,7 @@ fn lsp_npm_specifier_unopened_file() {
 
   // now ensure completions work
   let list = client.get_completion_list(
-    main_url,
+    temp_dir.uri().join("main.ts").unwrap(),
     (2, 6),
     json!({
       "triggerKind": 2,