
fix(lsp): update document dependencies on configuration change (#17556)

This commit is contained in:
David Sherret 2023-01-28 10:18:32 -05:00 committed by GitHub
parent 1803df7d3a
commit fe11df09b1
4 changed files with 284 additions and 89 deletions
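
In essence, Documents::update_config now hashes the resolution-relevant configuration (the import map plus the JSX import source config) and re-analyzes document dependencies only when that hash changes. Below is a minimal standalone sketch of that pattern, not Deno's actual API: it uses std's DefaultHasher and illustrative names (ResolverConfig, refresh_count) rather than the real types.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for the resolution-relevant parts of the LSP config: the import
// map contents and the JSX import source configuration.
#[derive(Hash)]
struct ResolverConfig {
  import_map_json: Option<String>,
  jsx_import_source: Option<String>,
}

#[derive(Default)]
struct Documents {
  resolver_config_hash: u64,
  refresh_count: usize, // stands in for re-analyzing every tracked document
}

impl Documents {
  fn update_config(&mut self, config: &ResolverConfig) {
    let mut hasher = DefaultHasher::new();
    config.hash(&mut hasher);
    let new_hash = hasher.finish();
    // Only rebuild document dependencies when the resolver config changed.
    if self.resolver_config_hash != new_hash {
      self.refresh_dependencies();
      self.resolver_config_hash = new_hash;
    }
  }

  fn refresh_dependencies(&mut self) {
    // The real commit re-runs module analysis with the new resolver for every
    // open and file-system document; counting calls is enough for the sketch.
    self.refresh_count += 1;
  }
}

fn main() {
  let mut docs = Documents::default();
  let cfg = ResolverConfig {
    import_map_json: Some(r#"{"imports":{"test":"./file2.ts"}}"#.to_string()),
    jsx_import_source: None,
  };
  docs.update_config(&cfg);
  docs.update_config(&cfg); // same config again: no second refresh
  assert_eq!(docs.refresh_count, 1);
}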

View file

@@ -28,6 +28,7 @@ use std::path::PathBuf;
pub type MaybeImportsResult =
Result<Option<Vec<(ModuleSpecifier, Vec<String>)>>, AnyError>;
#[derive(Hash)]
pub struct JsxImportSourceConfig {
pub default_specifier: Option<String>,
pub module: String,

View file

@@ -6,7 +6,9 @@ use super::tsc;
use super::tsc::AssetDocument;
use crate::args::ConfigFile;
use crate::args::JsxImportSourceConfig;
use crate::cache::CachedUrlMetadata;
use crate::cache::FastInsecureHasher;
use crate::cache::HttpCache;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::map_content_type;
@@ -30,6 +32,7 @@ use deno_core::futures::future;
use deno_core::parking_lot::Mutex;
use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::DefaultParsedSourceStore;
use deno_graph::GraphImport;
use deno_graph::Resolved;
use deno_runtime::deno_node::NodeResolutionMode;
@@ -245,7 +248,7 @@ struct DocumentDependencies {
}
impl DocumentDependencies {
pub fn from_maybe_module(maybe_module: &MaybeModuleResult) -> Self {
pub fn from_maybe_module(maybe_module: &Option<ModuleResult>) -> Self {
if let Some(Ok(module)) = &maybe_module {
Self::from_module(module)
} else {
@@ -261,10 +264,8 @@ impl DocumentDependencies {
}
}
type MaybeModuleResult =
Option<Result<deno_graph::Module, deno_graph::ModuleGraphError>>;
type MaybeParsedSourceResult =
Option<Result<ParsedSource, deno_ast::Diagnostic>>;
type ModuleResult = Result<deno_graph::Module, deno_graph::ModuleGraphError>;
type ParsedSourceResult = Result<ParsedSource, deno_ast::Diagnostic>;
#[derive(Debug)]
struct DocumentInner {
@@ -272,13 +273,14 @@ struct DocumentInner {
dependencies: Arc<DocumentDependencies>,
fs_version: String,
line_index: Arc<LineIndex>,
maybe_headers: Option<HashMap<String, String>>,
maybe_language_id: Option<LanguageId>,
maybe_lsp_version: Option<i32>,
maybe_module: MaybeModuleResult,
maybe_module: Option<ModuleResult>,
// this is a lazily constructed value based on the state of the document,
// so having a mutex to hold it is ok
maybe_navigation_tree: Mutex<Option<Arc<tsc::NavigationTree>>>,
maybe_parsed_source: MaybeParsedSourceResult,
maybe_parsed_source: Option<ParsedSourceResult>,
specifier: ModuleSpecifier,
text_info: SourceTextInfo,
}
@@ -290,28 +292,27 @@ impl Document {
fn new(
specifier: ModuleSpecifier,
fs_version: String,
maybe_headers: Option<&HashMap<String, String>>,
content: Arc<str>,
maybe_headers: Option<HashMap<String, String>>,
text_info: SourceTextInfo,
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) -> Self {
// we only ever do `Document::new` on on-disk resources that are supposed to
// be diagnosable, unlike `Document::open`, so it is safe to unconditionally
// parse the module.
let (maybe_module, maybe_parsed_source) = lsp_deno_graph_analyze(
let (maybe_parsed_source, maybe_module) = parse_and_analyze_module(
&specifier,
content.clone(),
maybe_headers,
text_info.clone(),
maybe_headers.as_ref(),
maybe_resolver,
);
let dependencies =
Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
// todo(dsherret): retrieve this from the parsed source if it exists
let text_info = SourceTextInfo::new(content);
let line_index = Arc::new(LineIndex::new(text_info.text_str()));
Self(Arc::new(DocumentInner {
dependencies,
fs_version,
line_index,
maybe_headers,
maybe_language_id: None,
maybe_lsp_version: None,
maybe_module,
@@ -322,6 +323,39 @@ impl Document {
}))
}
fn maybe_with_new_resolver(
&self,
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) -> Option<Self> {
let parsed_source_result = match &self.0.maybe_parsed_source {
Some(parsed_source_result) => parsed_source_result.clone(),
None => return None, // nothing to change
};
let maybe_module = Some(analyze_module(
&self.0.specifier,
&parsed_source_result,
self.0.maybe_headers.as_ref(),
maybe_resolver,
));
let dependencies =
Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
Some(Self(Arc::new(DocumentInner {
// updated properties
dependencies,
maybe_module,
maybe_navigation_tree: Mutex::new(None),
maybe_parsed_source: Some(parsed_source_result),
// maintain - this should all be copies/clones
fs_version: self.0.fs_version.clone(),
line_index: self.0.line_index.clone(),
maybe_headers: self.0.maybe_headers.clone(),
maybe_language_id: self.0.maybe_language_id,
maybe_lsp_version: self.0.maybe_lsp_version,
text_info: self.0.text_info.clone(),
specifier: self.0.specifier.clone(),
})))
}
fn open(
specifier: ModuleSpecifier,
version: i32,
@@ -330,10 +364,11 @@ impl Document {
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) -> Self {
let maybe_headers = language_id.as_headers();
let (maybe_module, maybe_parsed_source) = if language_id.is_diagnosable() {
lsp_deno_graph_analyze(
let text_info = SourceTextInfo::new(content);
let (maybe_parsed_source, maybe_module) = if language_id.is_diagnosable() {
parse_and_analyze_module(
&specifier,
content.clone(),
text_info.clone(),
maybe_headers,
maybe_resolver,
)
@@ -342,18 +377,18 @@ impl Document {
};
let dependencies =
Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
let source = SourceTextInfo::new(content);
let line_index = Arc::new(LineIndex::new(source.text_str()));
let line_index = Arc::new(LineIndex::new(text_info.text_str()));
Self(Arc::new(DocumentInner {
dependencies,
fs_version: "1".to_string(),
line_index,
maybe_language_id: Some(language_id),
maybe_lsp_version: Some(version),
maybe_headers: maybe_headers.map(ToOwned::to_owned),
maybe_module,
maybe_navigation_tree: Mutex::new(None),
maybe_parsed_source,
text_info: source,
text_info,
specifier,
}))
}
@@ -380,8 +415,8 @@ impl Document {
index_valid = IndexValid::UpTo(0);
}
}
let content: Arc<str> = content.into();
let (maybe_module, maybe_parsed_source) = if self
let text_info = SourceTextInfo::from_string(content);
let (maybe_parsed_source, maybe_module) = if self
.0
.maybe_language_id
.as_ref()
@@ -393,9 +428,9 @@ impl Document {
.maybe_language_id
.as_ref()
.and_then(|li| li.as_headers());
lsp_deno_graph_analyze(
parse_and_analyze_module(
&self.0.specifier,
content.clone(),
text_info.clone(),
maybe_headers,
maybe_resolver,
)
@@ -407,7 +442,6 @@ impl Document {
} else {
self.0.dependencies.clone() // use the last known good
};
let text_info = SourceTextInfo::new(content);
let line_index = if index_valid == IndexValid::All {
line_index
} else {
@@ -420,6 +454,7 @@ impl Document {
dependencies,
text_info,
line_index,
maybe_headers: self.0.maybe_headers.clone(),
maybe_module,
maybe_parsed_source,
maybe_lsp_version: Some(version),
@@ -712,21 +747,21 @@ impl FileSystemDocuments {
specifier.clone(),
fs_version,
None,
content.into(),
SourceTextInfo::from_string(content),
maybe_resolver,
)
} else {
let cache_filename = cache.get_cache_filename(specifier)?;
let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?;
let maybe_content_type = specifier_metadata.headers.get("content-type");
let maybe_headers = Some(&specifier_metadata.headers);
let (_, maybe_charset) = map_content_type(specifier, maybe_content_type);
let maybe_headers = Some(specifier_metadata.headers);
let content = get_source_from_bytes(bytes, maybe_charset).ok()?;
Document::new(
specifier.clone(),
fs_version,
maybe_headers,
content.into(),
SourceTextInfo::from_string(content),
maybe_resolver,
)
};
@@ -734,6 +769,18 @@ impl FileSystemDocuments {
self.docs.insert(specifier.clone(), doc.clone());
Some(doc)
}
pub fn refresh_dependencies(
&mut self,
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) {
for doc in self.docs.values_mut() {
if let Some(new_doc) = doc.maybe_with_new_resolver(maybe_resolver) {
*doc = new_doc;
}
}
self.dirty = true;
}
}
fn get_document_path(
@@ -761,6 +808,9 @@ pub struct Documents {
open_docs: HashMap<ModuleSpecifier, Document>,
/// Documents stored on the file system.
file_system_docs: Arc<Mutex<FileSystemDocuments>>,
/// Hash of the config used for resolution. When the hash changes, we update
/// dependencies.
resolver_config_hash: u64,
/// Any imports to the context supplied by configuration files. This is like
/// the imports into a module graph in the CLI.
imports: Arc<HashMap<ModuleSpecifier, GraphImport>>,
@@ -784,6 +834,7 @@ impl Documents {
dependents_map: Default::default(),
open_docs: HashMap::default(),
file_system_docs: Default::default(),
resolver_config_hash: 0,
imports: Default::default(),
maybe_resolver: None,
npm_reqs: Default::default(),
@@ -1112,9 +1163,27 @@ impl Documents {
maybe_import_map: Option<Arc<import_map::ImportMap>>,
maybe_config_file: Option<&ConfigFile>,
) {
// TODO(@kitsonk) update resolved dependencies?
fn calculate_resolver_config_hash(
maybe_import_map: Option<&import_map::ImportMap>,
maybe_jsx_config: Option<&JsxImportSourceConfig>,
) -> u64 {
let mut hasher = FastInsecureHasher::default();
if let Some(import_map) = maybe_import_map {
hasher.write_str(&import_map.to_json());
hasher.write_str(import_map.base_url().as_str());
}
if let Some(jsx_config) = maybe_jsx_config {
hasher.write_hashable(&jsx_config);
}
hasher.finish()
}
let maybe_jsx_config =
maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
let new_resolver_config_hash = calculate_resolver_config_hash(
maybe_import_map.as_deref(),
maybe_jsx_config.as_ref(),
);
self.maybe_resolver =
CliResolver::maybe_new(maybe_jsx_config, maybe_import_map);
self.imports = Arc::new(
@@ -1136,9 +1205,30 @@ impl Documents {
HashMap::new()
},
);
// only refresh the dependencies if the underlying configuration has changed
if self.resolver_config_hash != new_resolver_config_hash {
self.refresh_dependencies();
self.resolver_config_hash = new_resolver_config_hash;
}
self.dirty = true;
}
fn refresh_dependencies(&mut self) {
let maybe_resolver =
self.maybe_resolver.as_ref().map(|r| r.as_graph_resolver());
for doc in self.open_docs.values_mut() {
if let Some(new_doc) = doc.maybe_with_new_resolver(maybe_resolver) {
*doc = new_doc;
}
}
self
.file_system_docs
.lock()
.refresh_dependencies(maybe_resolver);
}
/// Iterate through the documents, building a map where the key is a unique
/// document and the value is a set of specifiers that depend on that
/// document.
@@ -1313,67 +1403,88 @@ impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
}
}
/// The default parser from `deno_graph` does not include the configuration
/// options we require for the lsp.
#[derive(Debug, Default)]
struct LspModuleParser;
impl deno_graph::ModuleParser for LspModuleParser {
fn parse_module(
&self,
specifier: &deno_graph::ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> deno_core::anyhow::Result<ParsedSource, deno_ast::Diagnostic> {
deno_ast::parse_module(deno_ast::ParseParams {
specifier: specifier.to_string(),
text_info: SourceTextInfo::new(source),
media_type,
capture_tokens: true,
scope_analysis: true,
maybe_syntax: None,
})
}
}
fn lsp_deno_graph_analyze(
fn parse_and_analyze_module(
specifier: &ModuleSpecifier,
content: Arc<str>,
text_info: SourceTextInfo,
maybe_headers: Option<&HashMap<String, String>>,
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) -> (MaybeModuleResult, MaybeParsedSourceResult) {
use deno_graph::ModuleParser;
let analyzer = deno_graph::CapturingModuleAnalyzer::new(
Some(Box::<LspModuleParser>::default()),
None,
);
let parsed_source_result = analyzer.parse_module(
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
let parsed_source_result = parse_source(specifier, text_info, maybe_headers);
let module_result = analyze_module(
specifier,
content.clone(),
MediaType::from_specifier_and_headers(specifier, maybe_headers),
&parsed_source_result,
maybe_headers,
maybe_resolver,
);
let module_result = match &parsed_source_result {
Ok(_) => deno_graph::parse_module(
specifier,
maybe_headers,
content,
Some(deno_graph::ModuleKind::Esm),
maybe_resolver,
Some(&analyzer),
),
(Some(parsed_source_result), Some(module_result))
}
fn parse_source(
specifier: &ModuleSpecifier,
text_info: SourceTextInfo,
maybe_headers: Option<&HashMap<String, String>>,
) -> ParsedSourceResult {
deno_ast::parse_module(deno_ast::ParseParams {
specifier: specifier.to_string(),
text_info,
media_type: MediaType::from_specifier_and_headers(specifier, maybe_headers),
capture_tokens: true,
scope_analysis: true,
maybe_syntax: None,
})
}
fn analyze_module(
specifier: &ModuleSpecifier,
parsed_source_result: &ParsedSourceResult,
maybe_headers: Option<&HashMap<String, String>>,
maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
) -> ModuleResult {
use deno_graph::ParsedSourceStore;
struct UnreachableParser;
impl deno_graph::ModuleParser for UnreachableParser {
fn parse_module(
&self,
_specifier: &deno_graph::ModuleSpecifier,
_source: Arc<str>,
_media_type: MediaType,
) -> deno_core::anyhow::Result<ParsedSource, deno_ast::Diagnostic> {
// should have re-used the parsed source from the store
unreachable!()
}
}
match parsed_source_result {
Ok(parsed_source) => {
let store = DefaultParsedSourceStore::default();
store.set_parsed_source(specifier.clone(), parsed_source.clone());
let analyzer = deno_graph::CapturingModuleAnalyzer::new(
// should never parse because it will get the parsed source from the store
Some(Box::new(UnreachableParser)),
Some(Box::new(store)),
);
deno_graph::parse_module(
specifier,
maybe_headers,
parsed_source.text_info().text(),
Some(deno_graph::ModuleKind::Esm),
maybe_resolver,
Some(&analyzer),
)
}
Err(err) => Err(deno_graph::ModuleGraphError::ParseErr(
specifier.clone(),
err.clone(),
)),
};
(Some(module_result), Some(parsed_source_result))
}
}
#[cfg(test)]
mod tests {
use super::*;
use import_map::ImportMap;
use test_util::TempDir;
fn setup(temp_dir: &TempDir) -> (Documents, PathBuf) {
@@ -1469,4 +1580,85 @@ console.log(b, "hello deno");
// Now make sure that the original `documents` doesn't return both copies
assert_eq!(documents.documents(false, false).len(), 1);
}
#[test]
fn test_documents_refresh_dependencies_config_change() {
// it should never happen that a user of this API causes this to happen,
// but we'll guard against it anyway
let temp_dir = TempDir::new();
let (mut documents, documents_path) = setup(&temp_dir);
fs::create_dir_all(&documents_path).unwrap();
let file1_path = documents_path.join("file1.ts");
let file1_specifier = ModuleSpecifier::from_file_path(&file1_path).unwrap();
fs::write(&file1_path, "").unwrap();
let file2_path = documents_path.join("file2.ts");
let file2_specifier = ModuleSpecifier::from_file_path(&file2_path).unwrap();
fs::write(&file2_path, "").unwrap();
let file3_path = documents_path.join("file3.ts");
let file3_specifier = ModuleSpecifier::from_file_path(&file3_path).unwrap();
fs::write(&file3_path, "").unwrap();
// set the initial import map and point to file 2
{
let mut import_map = ImportMap::new(
ModuleSpecifier::from_file_path(documents_path.join("import_map.json"))
.unwrap(),
);
import_map
.imports_mut()
.append("test".to_string(), "./file2.ts".to_string())
.unwrap();
documents.update_config(Some(Arc::new(import_map)), None);
// open the document
let document = documents.open(
file1_specifier.clone(),
1,
LanguageId::TypeScript,
"import {} from 'test';".into(),
);
assert_eq!(
document
.dependencies()
.get("test")
.unwrap()
.maybe_code
.maybe_specifier()
.map(ToOwned::to_owned),
Some(file2_specifier),
);
}
// now point at file 3
{
let mut import_map = ImportMap::new(
ModuleSpecifier::from_file_path(documents_path.join("import_map.json"))
.unwrap(),
);
import_map
.imports_mut()
.append("test".to_string(), "./file3.ts".to_string())
.unwrap();
documents.update_config(Some(Arc::new(import_map)), None);
// check the document's dependencies
let document = documents.get(&file1_specifier).unwrap();
assert_eq!(
document
.dependencies()
.get("test")
.unwrap()
.maybe_code
.maybe_specifier()
.map(ToOwned::to_owned),
Some(file3_specifier),
);
}
}
}

View file

@@ -1161,7 +1161,9 @@ impl Inner {
self.maybe_import_map.clone(),
self.maybe_config_file.as_ref(),
);
self.refresh_npm_specifiers().await;
self.diagnostics_server.invalidate_all();
self.restart_ts_server().await;
self.send_diagnostics_update();
self.send_testing_update();
}
@@ -3024,15 +3026,19 @@ impl Inner {
// the language server for TypeScript (as it might hold on to some stale
// documents).
self.diagnostics_server.invalidate_all();
self.restart_ts_server().await;
self.send_diagnostics_update();
self.send_testing_update();
self.performance.measure(mark);
}
async fn restart_ts_server(&self) {
let _: bool = self
.ts_server
.request(self.snapshot(), tsc::RequestMethod::Restart)
.await
.unwrap();
self.send_diagnostics_update();
self.send_testing_update();
self.performance.measure(mark);
}
fn get_performance(&self) -> Value {

View file

@@ -508,15 +508,11 @@ fn lsp_import_map_config_file() {
map.insert("config".to_string(), json!("./deno.import_map.jsonc"));
params.initialization_options = Some(Value::Object(map));
}
let import_map =
serde_json::to_vec_pretty(&load_fixture("import-map.json")).unwrap();
fs::write(temp_dir.path().join("import-map.json"), import_map).unwrap();
fs::create_dir(temp_dir.path().join("lib")).unwrap();
fs::write(
temp_dir.path().join("lib").join("b.ts"),
r#"export const b = "b";"#,
)
.unwrap();
let import_map_text =
serde_json::to_string_pretty(&load_fixture("import-map.json")).unwrap();
temp_dir.write("import-map.json", import_map_text);
temp_dir.create_dir_all("lib");
temp_dir.write("lib/b.ts", r#"export const b = "b";"#);
let deno_exe = deno_exe_path();
let mut client = LspClient::new(&deno_exe, false).unwrap();