1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-24 08:09:08 -05:00

fix(lsp): handle mbc documents properly (#9151)

Co-authored-by: Ryan Dahl <ry@tinyclouds.org>
This commit is contained in:
Kitson Kelly 2021-01-22 21:03:16 +11:00 committed by GitHub
parent ffa920e4b9
commit 1a9209d1e3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 1499 additions and 870 deletions

7
Cargo.lock generated
View file

@ -453,6 +453,7 @@ dependencies = [
"tempfile",
"termcolor",
"test_util",
"text-size",
"tokio",
"tokio-rustls",
"tower-test",
@ -2948,6 +2949,12 @@ dependencies = [
"tokio-tungstenite",
]
[[package]]
name = "text-size"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
[[package]]
name = "textwrap"
version = "0.11.0"

View file

@ -74,6 +74,7 @@ swc_common = { version = "0.10.8", features = ["sourcemap"] }
swc_ecmascript = { version = "0.17.1", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] }
tempfile = "3.1.0"
termcolor = "1.1.2"
text-size = "1.1.0"
tokio = { version = "1.0.1", features = ["full"] }
tokio-rustls = "0.22.0"
uuid = { version = "0.8.2", features = ["v4"] }

View file

@ -4,7 +4,6 @@ use super::analysis::get_lint_references;
use super::analysis::references_to_diagnostics;
use super::analysis::ResolvedDependency;
use super::language_server::StateSnapshot;
use super::memory_cache::FileId;
use super::tsc;
use crate::diagnostics;
@ -13,7 +12,7 @@ use crate::media_type::MediaType;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::ModuleSpecifier;
use lspower::lsp_types;
use std::collections::HashMap;
use std::collections::HashSet;
@ -28,43 +27,47 @@ pub enum DiagnosticSource {
#[derive(Debug, Default, Clone)]
pub struct DiagnosticCollection {
map: HashMap<(FileId, DiagnosticSource), Vec<lsp_types::Diagnostic>>,
versions: HashMap<FileId, i32>,
changes: HashSet<FileId>,
map: HashMap<(ModuleSpecifier, DiagnosticSource), Vec<lsp_types::Diagnostic>>,
versions: HashMap<ModuleSpecifier, i32>,
changes: HashSet<ModuleSpecifier>,
}
impl DiagnosticCollection {
pub fn set(
&mut self,
file_id: FileId,
specifier: ModuleSpecifier,
source: DiagnosticSource,
version: Option<i32>,
diagnostics: Vec<lsp_types::Diagnostic>,
) {
self.map.insert((file_id, source), diagnostics);
self.map.insert((specifier.clone(), source), diagnostics);
if let Some(version) = version {
self.versions.insert(file_id, version);
self.versions.insert(specifier.clone(), version);
}
self.changes.insert(file_id);
self.changes.insert(specifier);
}
pub fn diagnostics_for(
&self,
file_id: FileId,
source: DiagnosticSource,
specifier: &ModuleSpecifier,
source: &DiagnosticSource,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
self.map.get(&(file_id, source)).into_iter().flatten()
self
.map
.get(&(specifier.clone(), source.clone()))
.into_iter()
.flatten()
}
pub fn get_version(&self, file_id: &FileId) -> Option<i32> {
self.versions.get(file_id).cloned()
pub fn get_version(&self, specifier: &ModuleSpecifier) -> Option<i32> {
self.versions.get(specifier).cloned()
}
pub fn invalidate(&mut self, file_id: &FileId) {
self.versions.remove(file_id);
pub fn invalidate(&mut self, specifier: &ModuleSpecifier) {
self.versions.remove(specifier);
}
pub fn take_changes(&mut self) -> Option<HashSet<FileId>> {
pub fn take_changes(&mut self) -> Option<HashSet<ModuleSpecifier>> {
if self.changes.is_empty() {
return None;
}
@ -72,7 +75,8 @@ impl DiagnosticCollection {
}
}
pub type DiagnosticVec = Vec<(FileId, Option<i32>, Vec<lsp_types::Diagnostic>)>;
pub type DiagnosticVec =
Vec<(ModuleSpecifier, Option<i32>, Vec<lsp_types::Diagnostic>)>;
pub async fn generate_lint_diagnostics(
state_snapshot: StateSnapshot,
@ -81,25 +85,24 @@ pub async fn generate_lint_diagnostics(
tokio::task::spawn_blocking(move || {
let mut diagnostic_list = Vec::new();
let file_cache = state_snapshot.file_cache.lock().unwrap();
for (specifier, doc_data) in state_snapshot.doc_data.iter() {
let file_id = file_cache.lookup(specifier).unwrap();
let version = doc_data.version;
let current_version = diagnostic_collection.get_version(&file_id);
let documents = state_snapshot.documents.lock().unwrap();
for specifier in documents.open_specifiers() {
let version = documents.version(specifier);
let current_version = diagnostic_collection.get_version(specifier);
if version != current_version {
let media_type = MediaType::from(specifier);
if let Ok(source_code) = file_cache.get_contents(file_id) {
if let Ok(Some(source_code)) = documents.content(specifier) {
if let Ok(references) =
get_lint_references(specifier, &media_type, &source_code)
{
if !references.is_empty() {
diagnostic_list.push((
file_id,
specifier.clone(),
version,
references_to_diagnostics(references),
));
} else {
diagnostic_list.push((file_id, version, Vec::new()));
diagnostic_list.push((specifier.clone(), version, Vec::new()));
}
}
} else {
@ -154,7 +157,7 @@ fn to_lsp_range(
}
}
type TsDiagnostics = Vec<diagnostics::Diagnostic>;
type TsDiagnostics = HashMap<String, Vec<diagnostics::Diagnostic>>;
fn get_diagnostic_message(diagnostic: &diagnostics::Diagnostic) -> String {
if let Some(message) = diagnostic.message_text.clone() {
@ -197,65 +200,70 @@ fn to_lsp_related_information(
}
fn ts_json_to_diagnostics(
value: Value,
) -> Result<Vec<lsp_types::Diagnostic>, AnyError> {
let ts_diagnostics: TsDiagnostics = serde_json::from_value(value)?;
Ok(
ts_diagnostics
.iter()
.filter_map(|d| {
if let (Some(start), Some(end)) = (&d.start, &d.end) {
Some(lsp_types::Diagnostic {
range: to_lsp_range(start, end),
severity: Some((&d.category).into()),
code: Some(lsp_types::NumberOrString::Number(d.code as i32)),
code_description: None,
source: Some("deno-ts".to_string()),
message: get_diagnostic_message(d),
related_information: to_lsp_related_information(
&d.related_information,
),
tags: match d.code {
// These are codes that indicate the variable is unused.
6133 | 6192 | 6196 => {
Some(vec![lsp_types::DiagnosticTag::Unnecessary])
}
_ => None,
},
data: None,
})
} else {
None
}
})
.collect(),
)
diagnostics: &[diagnostics::Diagnostic],
) -> Vec<lsp_types::Diagnostic> {
diagnostics
.iter()
.filter_map(|d| {
if let (Some(start), Some(end)) = (&d.start, &d.end) {
Some(lsp_types::Diagnostic {
range: to_lsp_range(start, end),
severity: Some((&d.category).into()),
code: Some(lsp_types::NumberOrString::Number(d.code as i32)),
code_description: None,
source: Some("deno-ts".to_string()),
message: get_diagnostic_message(d),
related_information: to_lsp_related_information(
&d.related_information,
),
tags: match d.code {
// These are codes that indicate the variable is unused.
6133 | 6192 | 6196 => {
Some(vec![lsp_types::DiagnosticTag::Unnecessary])
}
_ => None,
},
data: None,
})
} else {
None
}
})
.collect()
}
pub async fn generate_ts_diagnostics(
ts_server: &tsc::TsServer,
diagnostic_collection: &DiagnosticCollection,
state_snapshot: StateSnapshot,
diagnostic_collection: DiagnosticCollection,
ts_server: &tsc::TsServer,
) -> Result<DiagnosticVec, AnyError> {
let mut diagnostics = Vec::new();
let state_snapshot_ = state_snapshot.clone();
for (specifier, doc_data) in state_snapshot_.doc_data.iter() {
let file_id = {
// TODO(lucacasonato): this is highly inefficient
let file_cache = state_snapshot_.file_cache.lock().unwrap();
file_cache.lookup(specifier).unwrap()
};
let version = doc_data.version;
let current_version = diagnostic_collection.get_version(&file_id);
if version != current_version {
let req = tsc::RequestMethod::GetDiagnostics(specifier.clone());
let ts_diagnostics = ts_json_to_diagnostics(
ts_server.request(state_snapshot.clone(), req).await?,
)?;
diagnostics.push((file_id, version, ts_diagnostics));
let mut specifiers = Vec::new();
{
let documents = state_snapshot.documents.lock().unwrap();
for specifier in documents.open_specifiers() {
let version = documents.version(specifier);
let current_version = diagnostic_collection.get_version(specifier);
if version != current_version {
specifiers.push(specifier.clone());
}
}
}
if !specifiers.is_empty() {
let req = tsc::RequestMethod::GetDiagnostics(specifiers);
let res = ts_server.request(state_snapshot.clone(), req).await?;
let ts_diagnostic_map: TsDiagnostics = serde_json::from_value(res)?;
for (specifier_str, ts_diagnostics) in ts_diagnostic_map.iter() {
let specifier = ModuleSpecifier::resolve_url(specifier_str)?;
let version =
state_snapshot.documents.lock().unwrap().version(&specifier);
diagnostics.push((
specifier,
version,
ts_json_to_diagnostics(ts_diagnostics),
));
}
}
Ok(diagnostics)
}
@ -266,19 +274,18 @@ pub async fn generate_dependency_diagnostics(
tokio::task::spawn_blocking(move || {
let mut diagnostics = Vec::new();
let file_cache = state_snapshot.file_cache.lock().unwrap();
let mut sources = if let Ok(sources) = state_snapshot.sources.lock() {
sources
} else {
return Err(custom_error("Deadlock", "deadlock locking sources"));
};
for (specifier, doc_data) in state_snapshot.doc_data.iter() {
let file_id = file_cache.lookup(specifier).unwrap();
let version = doc_data.version;
let current_version = diagnostic_collection.get_version(&file_id);
let documents = state_snapshot.documents.lock().unwrap();
for specifier in documents.open_specifiers() {
let version = documents.version(specifier);
let current_version = diagnostic_collection.get_version(specifier);
if version != current_version {
let mut diagnostic_list = Vec::new();
if let Some(dependencies) = &doc_data.dependencies {
if let Some(dependencies) = documents.dependencies(specifier) {
for (_, dependency) in dependencies.iter() {
if let (Some(code), Some(range)) = (
&dependency.maybe_code,
@ -299,7 +306,7 @@ pub async fn generate_dependency_diagnostics(
})
}
ResolvedDependency::Resolved(specifier) => {
if !(state_snapshot.doc_data.contains_key(&specifier) || sources.contains(&specifier)) {
if !(documents.contains(&specifier) || sources.contains(&specifier)) {
let is_local = specifier.as_url().scheme() == "file";
diagnostic_list.push(lsp_types::Diagnostic {
range: *range,
@ -322,7 +329,7 @@ pub async fn generate_dependency_diagnostics(
}
}
}
diagnostics.push((file_id, version, diagnostic_list))
diagnostics.push((specifier.clone(), version, diagnostic_list))
}
}

315
cli/lsp/documents.rs Normal file
View file

@ -0,0 +1,315 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::analysis;
use super::text::LineIndex;
use crate::import_map::ImportMap;
use crate::media_type::MediaType;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::error::Context;
use deno_core::ModuleSpecifier;
use lspower::lsp_types::TextDocumentContentChangeEvent;
use std::collections::HashMap;
use std::ops::Range;
/// Tracks how much of a cached line index is still trustworthy after a
/// sequence of edits has been applied to the document text.
#[derive(Debug, PartialEq, Eq)]
enum IndexValid {
  /// The whole index is still valid.
  All,
  /// Only lines strictly before the given line are still valid.
  UpTo(u32),
}

impl IndexValid {
  /// Returns `true` when the index can still be used for `line`.
  fn covers(&self, line: u32) -> bool {
    match self {
      IndexValid::All => true,
      IndexValid::UpTo(to) => line < *to,
    }
  }
}
/// The state of a single document in the cache: its raw UTF-8 content, a
/// cached line index for position conversions, the dependencies found by the
/// most recent analysis, and the client-reported version (`None` when the
/// document is not currently open).
#[derive(Debug, Clone, Default)]
pub struct DocumentData {
  // Raw UTF-8 bytes of the document text; `None` if no content was set.
  bytes: Option<Vec<u8>>,
  // Cached line index; rebuilt lazily when edits invalidate it.
  line_index: Option<LineIndex>,
  // Result of the last dependency analysis, if one has been performed.
  dependencies: Option<HashMap<String, analysis::Dependency>>,
  // LSP document version; `None` once the client closes the document.
  version: Option<i32>,
}

impl DocumentData {
  /// Applies LSP content change events to the document, in order.
  ///
  /// Incremental changes (those carrying a `range`) are converted to a byte
  /// range via the line index and spliced into the text; a change without a
  /// range replaces the entire text.  The line index is only rebuilt when a
  /// change starts at or beyond the region invalidated by an earlier change.
  ///
  /// No-op (returns `Ok(())`) when the document has no content.
  pub fn apply_content_changes(
    &mut self,
    content_changes: Vec<TextDocumentContentChangeEvent>,
  ) -> Result<(), AnyError> {
    if self.bytes.is_none() {
      return Ok(());
    }
    let content = &mut String::from_utf8(self.bytes.clone().unwrap())
      .context("unable to parse bytes to string")?;
    let mut line_index = if let Some(line_index) = &self.line_index {
      line_index.clone()
    } else {
      LineIndex::new(&content)
    };
    // Tracks how much of `line_index` is still valid after the edits applied
    // so far, so it is only rebuilt when a change actually needs fresh data.
    let mut index_valid = IndexValid::All;
    for change in content_changes {
      if let Some(range) = change.range {
        if !index_valid.covers(range.start.line) {
          line_index = LineIndex::new(&content);
        }
        // Everything from the start line of this edit onwards is now stale.
        index_valid = IndexValid::UpTo(range.start.line);
        let range = line_index.get_text_range(range)?;
        content.replace_range(Range::<usize>::from(range), &change.text);
      } else {
        // A change with no range is a full-document replacement.
        *content = change.text;
        index_valid = IndexValid::UpTo(0);
      }
    }
    self.bytes = Some(content.as_bytes().to_owned());
    self.line_index = if index_valid == IndexValid::All {
      Some(line_index)
    } else {
      // The final edit left the index stale; rebuild from the final text.
      Some(LineIndex::new(&content))
    };
    Ok(())
  }

  /// Returns the document text decoded from the stored bytes, or `None` when
  /// no content has been set.
  ///
  /// # Errors
  ///
  /// Returns an error if the stored bytes are not valid UTF-8.
  pub fn content(&self) -> Result<Option<String>, AnyError> {
    if let Some(bytes) = self.bytes.clone() {
      Ok(Some(
        String::from_utf8(bytes).context("cannot decode bytes to string")?,
      ))
    } else {
      Ok(None)
    }
  }
}
/// An in-memory cache of the documents the language server client has sent
/// us, keyed by their module specifier.
#[derive(Debug, Clone, Default)]
pub struct DocumentCache {
  docs: HashMap<ModuleSpecifier, DocumentData>,
}

impl DocumentCache {
  /// Looks up a document for mutation, returning a "NotFound" error when the
  /// specifier is not present.  Shared by the methods that require the
  /// document to already exist in the cache.
  fn get_doc_mut(
    &mut self,
    specifier: &ModuleSpecifier,
  ) -> Result<&mut DocumentData, AnyError> {
    self.docs.get_mut(specifier).ok_or_else(|| {
      custom_error(
        "NotFound",
        format!(
          "The specifier (\"{}\") does not exist in the document cache.",
          specifier
        ),
      )
    })
  }

  /// Analyzes the dependencies of the document's current content, storing the
  /// result on the document (`None` when there is no content or the source
  /// could not be analyzed).
  ///
  /// # Errors
  ///
  /// Returns a "NotFound" error when the specifier is not in the cache, or an
  /// error when the stored bytes cannot be decoded as UTF-8.
  pub fn analyze_dependencies(
    &mut self,
    specifier: &ModuleSpecifier,
    maybe_import_map: &Option<ImportMap>,
  ) -> Result<(), AnyError> {
    let media_type = MediaType::from(specifier);
    let doc = self.get_doc_mut(specifier)?;
    doc.dependencies = if let Some(source) = &doc.content()? {
      analysis::analyze_dependencies(
        specifier,
        source,
        &media_type,
        maybe_import_map,
      )
      .map(|(dependencies, _)| dependencies)
    } else {
      None
    };
    Ok(())
  }

  /// Applies LSP content changes to an open document and records the new
  /// version reported by the client.
  ///
  /// # Errors
  ///
  /// Returns a "NotFound" error when the specifier is not in the cache, or an
  /// error if a change cannot be applied.
  pub fn change(
    &mut self,
    specifier: &ModuleSpecifier,
    version: i32,
    content_changes: Vec<TextDocumentContentChangeEvent>,
  ) -> Result<(), AnyError> {
    let doc = self.get_doc_mut(specifier)?;
    doc.apply_content_changes(content_changes)?;
    doc.version = Some(version);
    Ok(())
  }

  /// Marks a document as closed by the client: the content is retained but
  /// the version and analyzed dependencies are cleared.
  pub fn close(&mut self, specifier: &ModuleSpecifier) {
    // `get_mut` already yields a mutable reference; no `mut` binding needed.
    if let Some(doc) = self.docs.get_mut(specifier) {
      doc.version = None;
      doc.dependencies = None;
    }
  }

  /// Returns `true` when the specifier has an entry in the cache.
  pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
    self.docs.contains_key(specifier)
  }

  /// Returns the current text of the document, or `None` when the document
  /// is unknown or has no content set.
  ///
  /// # Errors
  ///
  /// Returns an error if the stored bytes are not valid UTF-8.
  pub fn content(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<Option<String>, AnyError> {
    match self.docs.get(specifier) {
      Some(doc) => doc.content(),
      None => Ok(None),
    }
  }

  /// Returns a clone of the dependencies discovered by the last analysis of
  /// the document, if any.
  pub fn dependencies(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<HashMap<String, analysis::Dependency>> {
    self.docs.get(specifier)?.dependencies.clone()
  }

  /// Returns the number of documents in the cache.
  pub fn len(&self) -> usize {
    // `HashMap::len` is O(1); iterating to count was needless work.
    self.docs.len()
  }

  /// Returns `true` when the cache holds no documents.
  pub fn is_empty(&self) -> bool {
    self.docs.is_empty()
  }

  /// Returns a clone of the cached line index for the document, if one has
  /// been built.
  pub fn line_index(&self, specifier: &ModuleSpecifier) -> Option<LineIndex> {
    self.docs.get(specifier)?.line_index.clone()
  }

  /// Inserts (or replaces) a document with the given version and text,
  /// eagerly building its line index.
  pub fn open(
    &mut self,
    specifier: ModuleSpecifier,
    version: i32,
    text: String,
  ) {
    self.docs.insert(
      specifier,
      DocumentData {
        bytes: Some(text.as_bytes().to_owned()),
        version: Some(version),
        line_index: Some(LineIndex::new(&text)),
        ..Default::default()
      },
    );
  }

  /// Returns the specifiers of documents currently open in the client (those
  /// which have a version).
  pub fn open_specifiers(&self) -> Vec<&ModuleSpecifier> {
    self
      .docs
      .iter()
      .filter(|(_, doc)| doc.version.is_some())
      .map(|(specifier, _)| specifier)
      .collect()
  }

  /// Returns the client version of the document, or `None` when it is not
  /// open.
  pub fn version(&self, specifier: &ModuleSpecifier) -> Option<i32> {
    self.docs.get(specifier).and_then(|doc| doc.version)
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  use lspower::lsp_types;

  #[test]
  fn test_document_cache_contains() {
    let mut cache = DocumentCache::default();
    let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
    let missing_specifier =
      ModuleSpecifier::resolve_url("file:///a/c.ts").unwrap();
    cache.open(
      specifier.clone(),
      1,
      "console.log(\"Hello Deno\");\n".to_owned(),
    );
    assert!(cache.contains(&specifier));
    assert!(!cache.contains(&missing_specifier));
  }

  #[test]
  fn test_document_cache_change() {
    let mut cache = DocumentCache::default();
    let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
    cache.open(
      specifier.clone(),
      1,
      "console.log(\"Hello deno\");\n".to_owned(),
    );
    // Replace the lowercase "d" (characters 19..20 on line 0) with "D".
    let change = lsp_types::TextDocumentContentChangeEvent {
      range: Some(lsp_types::Range {
        start: lsp_types::Position {
          line: 0,
          character: 19,
        },
        end: lsp_types::Position {
          line: 0,
          character: 20,
        },
      }),
      range_length: Some(1),
      text: "D".to_string(),
    };
    cache
      .change(&specifier, 2, vec![change])
      .expect("failed to make changes");
    let content = cache.content(&specifier).expect("failed to get content");
    assert_eq!(content, Some("console.log(\"Hello Deno\");\n".to_string()));
  }

  #[test]
  fn test_document_cache_change_utf16() {
    // The emoji occupies two UTF-16 code units, so the client-supplied
    // positions differ from char counts; this exercises the mbc handling.
    let mut cache = DocumentCache::default();
    let specifier = ModuleSpecifier::resolve_url("file:///a/b.ts").unwrap();
    cache.open(
      specifier.clone(),
      1,
      "console.log(\"Hello 🦕\");\n".to_owned(),
    );
    let change = lsp_types::TextDocumentContentChangeEvent {
      range: Some(lsp_types::Range {
        start: lsp_types::Position {
          line: 0,
          character: 19,
        },
        end: lsp_types::Position {
          line: 0,
          character: 21,
        },
      }),
      range_length: Some(2),
      text: "Deno".to_string(),
    };
    cache
      .change(&specifier, 2, vec![change])
      .expect("failed to make changes");
    let content = cache.content(&specifier).expect("failed to get content");
    assert_eq!(content, Some("console.log(\"Hello Deno\");\n".to_string()));
  }
}

View file

@ -23,33 +23,31 @@ use tokio::fs;
use crate::deno_dir;
use crate::import_map::ImportMap;
use crate::media_type::MediaType;
use crate::tsc_config::parse_config;
use crate::tsc_config::TsConfig;
use super::analysis;
use super::capabilities;
use super::config::Config;
use super::diagnostics;
use super::diagnostics::DiagnosticCollection;
use super::diagnostics::DiagnosticSource;
use super::memory_cache::MemoryCache;
use super::documents::DocumentCache;
use super::sources;
use super::sources::Sources;
use super::text;
use super::text::apply_content_changes;
use super::text::LineIndex;
use super::tsc;
use super::tsc::AssetDocument;
use super::tsc::TsServer;
use super::utils;
#[derive(Debug, Clone)]
pub struct LanguageServer {
assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<String>>>>,
assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<AssetDocument>>>>,
client: Client,
ts_server: TsServer,
config: Arc<Mutex<Config>>,
doc_data: Arc<Mutex<HashMap<ModuleSpecifier, DocumentData>>>,
file_cache: Arc<Mutex<MemoryCache>>,
documents: Arc<Mutex<DocumentCache>>,
sources: Arc<Mutex<Sources>>,
diagnostics: Arc<Mutex<DiagnosticCollection>>,
maybe_config_uri: Arc<Mutex<Option<Url>>>,
@ -59,9 +57,8 @@ pub struct LanguageServer {
#[derive(Debug, Clone, Default)]
pub struct StateSnapshot {
pub assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<String>>>>,
pub doc_data: HashMap<ModuleSpecifier, DocumentData>,
pub file_cache: Arc<Mutex<MemoryCache>>,
pub assets: Arc<Mutex<HashMap<ModuleSpecifier, Option<AssetDocument>>>>,
pub documents: Arc<Mutex<DocumentCache>>,
pub sources: Arc<Mutex<Sources>>,
}
@ -78,8 +75,7 @@ impl LanguageServer {
client,
ts_server: TsServer::new(),
config: Default::default(),
doc_data: Default::default(),
file_cache: Default::default(),
documents: Default::default(),
sources,
diagnostics: Default::default(),
maybe_config_uri: Default::default(),
@ -93,34 +89,65 @@ impl LanguageServer {
config.settings.enable
}
/// Searches assets, open documents and external sources for a line_index,
/// which might be performed asynchronously, hydrating in memory caches for
/// subsequent requests.
pub async fn get_line_index(
&self,
specifier: ModuleSpecifier,
) -> Result<Vec<u32>, AnyError> {
let line_index = if specifier.as_url().scheme() == "asset" {
let state_snapshot = self.snapshot();
if let Some(source) =
tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await?
{
text::index_lines(&source)
} else {
return Err(anyhow!("asset source missing: {}", specifier));
}
} else {
let file_cache = self.file_cache.lock().unwrap();
if let Some(file_id) = file_cache.lookup(&specifier) {
let file_text = file_cache.get_contents(file_id)?;
text::index_lines(&file_text)
} else {
let mut sources = self.sources.lock().unwrap();
if let Some(line_index) = sources.get_line_index(&specifier) {
line_index
) -> Result<LineIndex, AnyError> {
if specifier.as_url().scheme() == "asset" {
let maybe_asset =
{ self.assets.lock().unwrap().get(&specifier).cloned() };
if let Some(maybe_asset) = maybe_asset {
if let Some(asset) = maybe_asset {
Ok(asset.line_index)
} else {
return Err(anyhow!("source for specifier not found: {}", specifier));
Err(anyhow!("asset is missing: {}", specifier))
}
} else {
let state_snapshot = self.snapshot();
if let Some(asset) =
tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await?
{
Ok(asset.line_index)
} else {
Err(anyhow!("asset is missing: {}", specifier))
}
}
};
Ok(line_index)
} else if let Some(line_index) =
self.documents.lock().unwrap().line_index(&specifier)
{
Ok(line_index)
} else if let Some(line_index) =
self.sources.lock().unwrap().get_line_index(&specifier)
{
Ok(line_index)
} else {
Err(anyhow!("Unable to find line index for: {}", specifier))
}
}
/// Only searches already cached assets and documents for a line index. If
/// the line index cannot be found, `None` is returned.
pub fn get_line_index_sync(
&self,
specifier: &ModuleSpecifier,
) -> Option<LineIndex> {
if specifier.as_url().scheme() == "asset" {
if let Some(Some(asset)) = self.assets.lock().unwrap().get(specifier) {
Some(asset.line_index.clone())
} else {
None
}
} else {
let documents = self.documents.lock().unwrap();
if documents.contains(specifier) {
documents.line_index(specifier)
} else {
self.sources.lock().unwrap().get_line_index(specifier)
}
}
}
async fn prepare_diagnostics(&self) -> Result<(), AnyError> {
@ -130,6 +157,7 @@ impl LanguageServer {
};
let lint = async {
let mut disturbed = false;
if lint_enabled {
let diagnostic_collection = self.diagnostics.lock().unwrap().clone();
let diagnostics = diagnostics::generate_lint_diagnostics(
@ -137,59 +165,50 @@ impl LanguageServer {
diagnostic_collection,
)
.await;
disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
for (file_id, version, diagnostics) in diagnostics {
for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
file_id,
specifier,
DiagnosticSource::Lint,
version,
diagnostics,
);
}
}
self.publish_diagnostics().await?
};
Ok::<(), AnyError>(())
Ok::<bool, AnyError>(disturbed)
};
let ts = async {
let mut disturbed = false;
if enabled {
let diagnostics = {
let diagnostic_collection = self.diagnostics.lock().unwrap().clone();
match diagnostics::generate_ts_diagnostics(
&self.ts_server,
&diagnostic_collection,
self.snapshot(),
)
.await
{
Ok(diagnostics) => diagnostics,
Err(err) => {
error!("Error processing TypeScript diagnostics:\n{}", err);
vec![]
}
}
};
let diagnostics_collection = self.diagnostics.lock().unwrap().clone();
let diagnostics = diagnostics::generate_ts_diagnostics(
self.snapshot(),
diagnostics_collection,
&self.ts_server,
)
.await?;
disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
for (file_id, version, diagnostics) in diagnostics {
for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
file_id,
specifier,
DiagnosticSource::TypeScript,
version,
diagnostics,
);
}
};
self.publish_diagnostics().await?
}
Ok::<(), AnyError>(())
}
};
Ok::<bool, AnyError>(disturbed)
};
let deps = async {
let mut disturbed = false;
if enabled {
let diagnostics_collection = self.diagnostics.lock().unwrap().clone();
let diagnostics = diagnostics::generate_dependency_diagnostics(
@ -197,27 +216,26 @@ impl LanguageServer {
diagnostics_collection,
)
.await?;
disturbed = !diagnostics.is_empty();
{
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
for (file_id, version, diagnostics) in diagnostics {
for (specifier, version, diagnostics) in diagnostics {
diagnostics_collection.set(
file_id,
specifier,
DiagnosticSource::Deno,
version,
diagnostics,
);
}
}
self.publish_diagnostics().await?
};
Ok::<(), AnyError>(())
Ok::<bool, AnyError>(disturbed)
};
let (lint_res, ts_res, deps_res) = tokio::join!(lint, ts, deps);
lint_res?;
ts_res?;
deps_res?;
if lint_res? || ts_res? || deps_res? {
self.publish_diagnostics().await?;
}
Ok(())
}
@ -230,7 +248,7 @@ impl LanguageServer {
};
if let Some(diagnostic_changes) = maybe_changes {
let settings = self.config.lock().unwrap().settings.clone();
for file_id in diagnostic_changes {
for specifier in diagnostic_changes {
// TODO(@kitsonk) not totally happy with the way we collect and store
// different types of diagnostics and offer them up to the client, we
// do need to send "empty" vectors though when a particular feature is
@ -238,7 +256,7 @@ impl LanguageServer {
// diagnostics
let mut diagnostics: Vec<Diagnostic> = if settings.lint {
diagnostics_collection
.diagnostics_for(file_id, DiagnosticSource::Lint)
.diagnostics_for(&specifier, &DiagnosticSource::Lint)
.cloned()
.collect()
} else {
@ -247,27 +265,17 @@ impl LanguageServer {
if self.enabled() {
diagnostics.extend(
diagnostics_collection
.diagnostics_for(file_id, DiagnosticSource::TypeScript)
.diagnostics_for(&specifier, &DiagnosticSource::TypeScript)
.cloned(),
);
diagnostics.extend(
diagnostics_collection
.diagnostics_for(file_id, DiagnosticSource::Deno)
.diagnostics_for(&specifier, &DiagnosticSource::Deno)
.cloned(),
);
}
let specifier = {
let file_cache = self.file_cache.lock().unwrap();
file_cache.get_specifier(file_id).clone()
};
let uri = specifier.as_url().clone();
let version = if let Some(doc_data) =
self.doc_data.lock().unwrap().get(&specifier)
{
doc_data.version
} else {
None
};
let version = self.documents.lock().unwrap().version(&specifier);
self
.client
.publish_diagnostics(uri, diagnostics, version)
@ -281,8 +289,7 @@ impl LanguageServer {
pub fn snapshot(&self) -> StateSnapshot {
StateSnapshot {
assets: self.assets.clone(),
doc_data: self.doc_data.lock().unwrap().clone(),
file_cache: self.file_cache.clone(),
documents: self.documents.clone(),
sources: self.sources.clone(),
}
}
@ -507,61 +514,48 @@ impl lspower::LanguageServer for LanguageServer {
return;
}
let specifier = utils::normalize_url(params.text_document.uri);
let maybe_import_map = self.maybe_import_map.lock().unwrap().clone();
if self
.doc_data
self.documents.lock().unwrap().open(
specifier.clone(),
params.text_document.version,
params.text_document.text,
);
if let Err(err) = self
.documents
.lock()
.unwrap()
.insert(
specifier.clone(),
DocumentData::new(
specifier.clone(),
params.text_document.version,
&params.text_document.text,
maybe_import_map,
),
)
.is_some()
.analyze_dependencies(&specifier, &self.maybe_import_map.lock().unwrap())
{
error!("duplicate DidOpenTextDocument: {}", specifier);
error!("{}", err);
}
self
.file_cache
.lock()
.unwrap()
.set_contents(specifier, Some(params.text_document.text.into_bytes()));
// TODO(@lucacasonato): error handling
self.prepare_diagnostics().await.unwrap();
// TODO(@kitsonk): how to better lazily do this?
if let Err(err) = self.prepare_diagnostics().await {
error!("{}", err);
}
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
let specifier = utils::normalize_url(params.text_document.uri);
let mut content = {
let file_cache = self.file_cache.lock().unwrap();
let file_id = file_cache.lookup(&specifier).unwrap();
file_cache.get_contents(file_id).unwrap()
};
apply_content_changes(&mut content, params.content_changes);
{
let mut doc_data = self.doc_data.lock().unwrap();
let doc_data = doc_data.get_mut(&specifier).unwrap();
let maybe_import_map = self.maybe_import_map.lock().unwrap();
doc_data.update(
params.text_document.version,
&content,
&maybe_import_map,
);
if let Err(err) = self.documents.lock().unwrap().change(
&specifier,
params.text_document.version,
params.content_changes,
) {
error!("{}", err);
}
self
.file_cache
if let Err(err) = self
.documents
.lock()
.unwrap()
.set_contents(specifier, Some(content.into_bytes()));
.analyze_dependencies(&specifier, &self.maybe_import_map.lock().unwrap())
{
error!("{}", err);
}
// TODO(@lucacasonato): error handling
self.prepare_diagnostics().await.unwrap();
// TODO(@kitsonk): how to better lazily do this?
if let Err(err) = self.prepare_diagnostics().await {
error!("{}", err);
}
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
@ -572,12 +566,12 @@ impl lspower::LanguageServer for LanguageServer {
return;
}
let specifier = utils::normalize_url(params.text_document.uri);
if self.doc_data.lock().unwrap().remove(&specifier).is_none() {
error!("orphaned document: {}", specifier);
self.documents.lock().unwrap().close(&specifier);
// TODO(@kitsonk): how to better lazily do this?
if let Err(err) = self.prepare_diagnostics().await {
error!("{}", err);
}
// TODO(@kitsonk) should we do garbage collection on the diagnostics?
// TODO(@lucacasonato): error handling
self.prepare_diagnostics().await.unwrap();
}
async fn did_save(&self, _params: DidSaveTextDocumentParams) {
@ -673,12 +667,17 @@ impl lspower::LanguageServer for LanguageServer {
params: DocumentFormattingParams,
) -> LspResult<Option<Vec<TextEdit>>> {
let specifier = utils::normalize_url(params.text_document.uri.clone());
let file_text = {
let file_cache = self.file_cache.lock().unwrap();
let file_id = file_cache.lookup(&specifier).unwrap();
// TODO(lucacasonato): handle error properly
file_cache.get_contents(file_id).unwrap()
};
let file_text = self
.documents
.lock()
.unwrap()
.content(&specifier)
.map_err(|_| {
LspError::invalid_params(
"The specified file could not be found in memory.",
)
})?
.unwrap();
let file_path =
if let Ok(file_path) = params.text_document.uri.to_file_path() {
@ -723,14 +722,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
// TODO(lucacasonato): handle error correctly
let line_index = self.get_line_index(specifier.clone()).await.unwrap();
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetQuickInfo((
specifier,
text::to_char_pos(
&line_index,
params.text_document_position_params.position,
),
line_index.offset_tsc(params.text_document_position_params.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@ -738,7 +741,8 @@ impl lspower::LanguageServer for LanguageServer {
let maybe_quick_info: Option<tsc::QuickInfo> =
serde_json::from_value(res).unwrap();
if let Some(quick_info) = maybe_quick_info {
Ok(Some(quick_info.to_hover(&line_index)))
let hover = quick_info.to_hover(&line_index);
Ok(Some(hover))
} else {
Ok(None)
}
@ -754,15 +758,19 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
// TODO(lucacasonato): handle error correctly
let line_index = self.get_line_index(specifier.clone()).await.unwrap();
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let files_to_search = vec![specifier.clone()];
let req = tsc::RequestMethod::GetDocumentHighlights((
specifier,
text::to_char_pos(
&line_index,
params.text_document_position_params.position,
),
line_index.offset_tsc(params.text_document_position_params.position)?,
files_to_search,
));
// TODO(lucacasonato): handle error correctly
@ -793,11 +801,18 @@ impl lspower::LanguageServer for LanguageServer {
}
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
// TODO(lucacasonato): handle error correctly
let line_index = self.get_line_index(specifier.clone()).await.unwrap();
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetReferences((
specifier,
text::to_char_pos(&line_index, params.text_document_position.position),
line_index.offset_tsc(params.text_document_position.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@ -836,14 +851,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier = utils::normalize_url(
params.text_document_position_params.text_document.uri,
);
// TODO(lucacasonato): handle error correctly
let line_index = self.get_line_index(specifier.clone()).await.unwrap();
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetDefinition((
specifier,
text::to_char_pos(
&line_index,
params.text_document_position_params.position,
),
line_index.offset_tsc(params.text_document_position_params.position)?,
));
// TODO(lucacasonato): handle error correctly
let res = self.ts_server.request(self.snapshot(), req).await.unwrap();
@ -872,10 +891,18 @@ impl lspower::LanguageServer for LanguageServer {
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
// TODO(lucacasonato): handle error correctly
let line_index = self.get_line_index(specifier.clone()).await.unwrap();
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetCompletions((
specifier,
text::to_char_pos(&line_index, params.text_document_position.position),
line_index.offset_tsc(params.text_document_position.position)?,
tsc::UserPreferences {
// TODO(lucacasonato): enable this. see https://github.com/denoland/deno/pull/8651
include_completions_with_insert_text: Some(false),
@ -906,20 +933,18 @@ impl lspower::LanguageServer for LanguageServer {
params.text_document_position_params.text_document.uri,
);
let line_index =
self
.get_line_index(specifier.clone())
.await
.map_err(|err| {
error!("Failed to get line_index {:#?}", err);
LspError::internal_error()
})?;
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetImplementation((
specifier,
text::to_char_pos(
&line_index,
params.text_document_position_params.position,
),
line_index.offset_tsc(params.text_document_position_params.position)?,
));
let res =
self
@ -965,36 +990,36 @@ impl lspower::LanguageServer for LanguageServer {
if !self.enabled() {
return Ok(None);
}
let snapshot = self.snapshot();
let specifier =
utils::normalize_url(params.text_document_position.text_document.uri);
let line_index =
self
.get_line_index(specifier.clone())
.await
.map_err(|err| {
error!("Failed to get line_index {:#?}", err);
LspError::internal_error()
})?;
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::FindRenameLocations((
specifier,
text::to_char_pos(&line_index, params.text_document_position.position),
line_index.offset_tsc(params.text_document_position.position)?,
true,
true,
false,
));
let res = self
.ts_server
.request(snapshot.clone(), req)
.await
.map_err(|err| {
error!("Failed to request to tsserver {:#?}", err);
LspError::invalid_request()
})?;
let res =
self
.ts_server
.request(self.snapshot(), req)
.await
.map_err(|err| {
error!("Failed to request to tsserver {:#?}", err);
LspError::invalid_request()
})?;
let maybe_locations = serde_json::from_value::<
Option<Vec<tsc::RenameLocation>>,
@ -1007,26 +1032,22 @@ impl lspower::LanguageServer for LanguageServer {
LspError::internal_error()
})?;
match maybe_locations {
Some(locations) => {
let rename_locations = tsc::RenameLocations { locations };
let workpace_edits = rename_locations
.into_workspace_edit(
snapshot,
|s| self.get_line_index(s),
&params.new_name,
)
.await
.map_err(|err| {
error!(
"Failed to convert tsc::RenameLocations to WorkspaceEdit {:#?}",
err
);
LspError::internal_error()
})?;
Ok(Some(workpace_edits))
}
None => Ok(None),
if let Some(locations) = maybe_locations {
let rename_locations = tsc::RenameLocations { locations };
let workspace_edits = rename_locations
.into_workspace_edit(
&params.new_name,
|s| self.get_line_index(s),
|s| self.documents.lock().unwrap().version(&s),
)
.await
.map_err(|err| {
error!("Failed to get workspace edits: {:#?}", err);
LspError::internal_error()
})?;
Ok(Some(workspace_edits))
} else {
Ok(None)
}
}
@ -1090,12 +1111,8 @@ impl LanguageServer {
error!("{}", err);
LspError::internal_error()
})?;
{
let file_cache = self.file_cache.lock().unwrap();
if let Some(file_id) = file_cache.lookup(&specifier) {
let mut diagnostics_collection = self.diagnostics.lock().unwrap();
diagnostics_collection.invalidate(&file_id);
}
if self.documents.lock().unwrap().contains(&specifier) {
self.diagnostics.lock().unwrap().invalidate(&specifier);
}
self.prepare_diagnostics().await.map_err(|err| {
error!("{}", err);
@ -1111,28 +1128,38 @@ impl LanguageServer {
let specifier = utils::normalize_url(params.text_document.uri);
let url = specifier.as_url();
let contents = if url.as_str() == "deno:/status.md" {
let file_cache = self.file_cache.lock().unwrap();
let documents = self.documents.lock().unwrap();
Some(format!(
r#"# Deno Language Server Status
- Documents in memory: {}
"#,
file_cache.len()
documents.len()
))
} else {
match url.scheme() {
"asset" => {
let state_snapshot = self.snapshot();
if let Some(text) =
tsc::get_asset(&specifier, &self.ts_server, &state_snapshot)
.await
.map_err(|_| LspError::internal_error())?
{
Some(text)
let maybe_asset =
{ self.assets.lock().unwrap().get(&specifier).cloned() };
if let Some(maybe_asset) = maybe_asset {
if let Some(asset) = maybe_asset {
Some(asset.text)
} else {
None
}
} else {
error!("Missing asset: {}", specifier);
None
let state_snapshot = self.snapshot();
if let Some(asset) =
tsc::get_asset(&specifier, &self.ts_server, &state_snapshot)
.await
.map_err(|_| LspError::internal_error())?
{
Some(asset.text)
} else {
error!("Missing asset: {}", specifier);
None
}
}
}
_ => {
@ -1150,59 +1177,6 @@ impl LanguageServer {
}
}
#[derive(Debug, Clone)]
pub struct DocumentData {
pub dependencies: Option<HashMap<String, analysis::Dependency>>,
pub version: Option<i32>,
specifier: ModuleSpecifier,
}
impl DocumentData {
pub fn new(
specifier: ModuleSpecifier,
version: i32,
source: &str,
maybe_import_map: Option<ImportMap>,
) -> Self {
let dependencies = if let Some((dependencies, _)) =
analysis::analyze_dependencies(
&specifier,
source,
&MediaType::from(&specifier),
&maybe_import_map,
) {
Some(dependencies)
} else {
None
};
Self {
dependencies,
version: Some(version),
specifier,
}
}
pub fn update(
&mut self,
version: i32,
source: &str,
maybe_import_map: &Option<ImportMap>,
) {
self.dependencies = if let Some((dependencies, _)) =
analysis::analyze_dependencies(
&self.specifier,
source,
&MediaType::from(&self.specifier),
maybe_import_map,
) {
Some(dependencies)
} else {
None
};
self.version = Some(version)
}
}
#[cfg(test)]
mod tests {
use super::*;
@ -1211,6 +1185,7 @@ mod tests {
use lspower::LspService;
use std::fs;
use std::task::Poll;
use std::time::Instant;
use tower_test::mock::Spawn;
enum LspResponse {
@ -1410,6 +1385,69 @@ mod tests {
]);
harness.run().await;
}
#[tokio::test]
async fn test_hover_change_mbc() {
let mut harness = LspTestHarness::new(vec![
("initialize_request.json", LspResponse::RequestAny),
("initialized_notification.json", LspResponse::None),
("did_open_notification_mbc.json", LspResponse::None),
("did_change_notification_mbc.json", LspResponse::None),
(
"hover_request_mbc.json",
LspResponse::Request(
2,
json!({
"contents": [
{
"language": "typescript",
"value": "const b: \"😃\"",
},
"",
],
"range": {
"start": {
"line": 2,
"character": 13,
},
"end": {
"line": 2,
"character": 14,
},
}
}),
),
),
(
"shutdown_request.json",
LspResponse::Request(3, json!(null)),
),
("exit_notification.json", LspResponse::None),
]);
harness.run().await;
}
#[tokio::test]
async fn test_large_doc_change() {
let mut harness = LspTestHarness::new(vec![
("initialize_request.json", LspResponse::RequestAny),
("initialized_notification.json", LspResponse::None),
("did_open_notification_large.json", LspResponse::None),
("did_change_notification_large.json", LspResponse::None),
(
"shutdown_request.json",
LspResponse::Request(3, json!(null)),
),
("exit_notification.json", LspResponse::None),
]);
let time = Instant::now();
harness.run().await;
assert!(
time.elapsed().as_millis() <= 10000,
"the execution time exceeded 10000ms"
);
}
#[tokio::test]
async fn test_rename() {
let mut harness = LspTestHarness::new(vec![

View file

@ -1,121 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use std::collections::HashMap;
use std::fmt;
/// Dense numeric handle for a document held in the [`MemoryCache`];
/// indexes directly into the cache's internal vectors.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct FileId(pub u32);
/// What happened to a cached file when its contents were last set.
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
pub enum ChangeKind {
  Create,
  Modify,
  Delete,
}
/// A single recorded mutation of the cache: which file changed and how.
pub struct ChangedFile {
  pub change_kind: ChangeKind,
  pub file_id: FileId,
}
/// Bidirectional map between module specifiers and dense [`FileId`]s.
/// `vec[id.0]` holds the specifier for `id`; `map` is the reverse lookup.
#[derive(Default)]
struct SpecifierInterner {
  map: HashMap<ModuleSpecifier, FileId>,
  vec: Vec<ModuleSpecifier>,
}
impl SpecifierInterner {
  /// Look up the id previously assigned to `specifier`, if any.
  pub fn get(&self, specifier: &ModuleSpecifier) -> Option<FileId> {
    self.map.get(specifier).map(|&id| id)
  }

  /// Return the id for `specifier`, assigning the next free id on first use.
  pub fn intern(&mut self, specifier: ModuleSpecifier) -> FileId {
    match self.get(&specifier) {
      Some(existing) => existing,
      None => {
        let assigned = FileId(self.vec.len() as u32);
        self.map.insert(specifier.clone(), assigned);
        self.vec.push(specifier);
        assigned
      }
    }
  }

  /// Resolve an id back to its specifier; panics on an id never interned.
  pub fn lookup(&self, id: FileId) -> &ModuleSpecifier {
    &self.vec[id.0 as usize]
  }
}
/// In-memory document store keyed by [`FileId`].
/// `data[id]` is `None` for an allocated-but-deleted slot; `changes`
/// accumulates every create/modify/delete performed via `set_contents`.
#[derive(Default)]
pub struct MemoryCache {
  data: Vec<Option<Vec<u8>>>,
  interner: SpecifierInterner,
  changes: Vec<ChangedFile>,
}
impl MemoryCache {
  /// Intern `specifier` and grow `data` so the returned id's slot exists
  /// (newly created slots start as `None`, i.e. "no contents").
  fn alloc_file_id(&mut self, specifier: ModuleSpecifier) -> FileId {
    let file_id = self.interner.intern(specifier);
    let idx = file_id.0 as usize;
    let len = self.data.len().max(idx + 1);
    self.data.resize_with(len, || None);
    file_id
  }
  // Panics if `file_id` was never allocated (index out of bounds).
  fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
    &self.data[file_id.0 as usize]
  }
  /// Decode the cached bytes as UTF-8.
  ///
  /// Panics (via `unwrap`) if the slot is `None` — i.e. the file was
  /// deleted or never populated; callers are expected to check `lookup`
  /// first. Returns an error only for invalid UTF-8.
  pub fn get_contents(&self, file_id: FileId) -> Result<String, AnyError> {
    String::from_utf8(self.get(file_id).as_deref().unwrap().to_vec())
      .map_err(|err| err.into())
  }
  fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
    &mut self.data[file_id.0 as usize]
  }
  /// Reverse lookup: the specifier originally interned for `file_id`.
  pub fn get_specifier(&self, file_id: FileId) -> &ModuleSpecifier {
    self.interner.lookup(file_id)
  }
  /// Number of allocated slots — this counts deleted (`None`) entries too,
  /// since slots are never reclaimed.
  pub fn len(&self) -> usize {
    self.data.len()
  }
  /// Find the id for `specifier`, treating deleted slots as absent.
  pub fn lookup(&self, specifier: &ModuleSpecifier) -> Option<FileId> {
    self
      .interner
      .get(specifier)
      .filter(|&it| self.get(it).is_some())
  }
  /// Store (`Some`) or delete (`None`) the contents for `specifier`,
  /// recording the resulting [`ChangeKind`]. No-ops when nothing changes:
  /// deleting an empty slot, or writing bytes identical to the old ones.
  pub fn set_contents(
    &mut self,
    specifier: ModuleSpecifier,
    contents: Option<Vec<u8>>,
  ) {
    let file_id = self.alloc_file_id(specifier);
    let change_kind = match (self.get(file_id), &contents) {
      (None, None) => return,
      (None, Some(_)) => ChangeKind::Create,
      (Some(_), None) => ChangeKind::Delete,
      (Some(old), Some(new)) if old == new => return,
      (Some(_), Some(_)) => ChangeKind::Modify,
    };
    *self.get_mut(file_id) = contents;
    self.changes.push(ChangedFile {
      file_id,
      change_kind,
    })
  }
}
impl fmt::Debug for MemoryCache {
  // Compact debug form: only the slot count, not the (potentially large)
  // file contents.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("MemoryCache")
      .field("no_files", &self.data.len())
      .finish()
  }
}

View file

@ -7,8 +7,8 @@ mod analysis;
mod capabilities;
mod config;
mod diagnostics;
mod documents;
mod language_server;
mod memory_cache;
mod sources;
mod text;
mod tsc;

View file

@ -1,7 +1,7 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::analysis;
use super::text;
use super::text::LineIndex;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::map_content_type;
@ -43,6 +43,7 @@ pub async fn cache(
#[derive(Debug, Clone, Default)]
struct Metadata {
dependencies: Option<HashMap<String, analysis::Dependency>>,
line_index: LineIndex,
maybe_types: Option<analysis::ResolvedDependency>,
media_type: MediaType,
source: String,
@ -75,19 +76,26 @@ impl Sources {
false
}
pub fn get_length(&mut self, specifier: &ModuleSpecifier) -> Option<usize> {
/// Provides the length of the source content, calculated in a way that should
/// match the behavior of JavaScript, where strings are stored effectively as
/// `&[u16]` and when counting "chars" we need to represent the string as a
/// UTF-16 string in Rust.
pub fn get_length_utf16(
&mut self,
specifier: &ModuleSpecifier,
) -> Option<usize> {
let specifier = self.resolve_specifier(specifier)?;
let metadata = self.get_metadata(&specifier)?;
Some(metadata.source.chars().count())
Some(metadata.source.encode_utf16().count())
}
pub fn get_line_index(
&mut self,
specifier: &ModuleSpecifier,
) -> Option<Vec<u32>> {
) -> Option<LineIndex> {
let specifier = self.resolve_specifier(specifier)?;
let metadata = self.get_metadata(&specifier)?;
Some(text::index_lines(&metadata.source))
Some(metadata.line_index)
}
pub fn get_media_type(
@ -127,8 +135,10 @@ impl Sources {
} else {
None
};
let line_index = LineIndex::new(&source);
let metadata = Metadata {
dependencies,
line_index,
maybe_types,
media_type,
source,
@ -169,8 +179,10 @@ impl Sources {
} else {
None
};
let line_index = LineIndex::new(&source);
let metadata = Metadata {
dependencies,
line_index,
maybe_types,
media_type,
source,
@ -388,7 +400,7 @@ mod tests {
}
#[test]
fn test_sources_get_length() {
fn test_sources_get_length_utf16() {
let (mut sources, _) = setup();
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let tests = c.join("tests");
@ -396,7 +408,7 @@ mod tests {
&tests.join("001_hello.js").to_string_lossy(),
)
.unwrap();
let actual = sources.get_length(&specifier);
let actual = sources.get_length_utf16(&specifier);
assert!(actual.is_some());
let actual = actual.unwrap();
assert_eq!(actual, 28);

View file

@ -1,123 +1,233 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use dissimilar::diff;
use dissimilar::Chunk;
use lspower::jsonrpc;
use lspower::lsp_types;
use lspower::lsp_types::TextEdit;
use std::collections::HashMap;
use std::ops::Bound;
use std::ops::Range;
use std::ops::RangeBounds;
use text_size::TextRange;
use text_size::TextSize;
// TODO(@kitson) in general all of these text handling routines don't handle
// JavaScript encoding in the same way and likely cause issues when trying to
// arbitrate between chars and Unicode graphemes. There be dragons.
fn partition_point<T, P>(slice: &[T], mut predicate: P) -> usize
where
P: FnMut(&T) -> bool,
{
let mut left = 0;
let mut right = slice.len();
/// Generate a character position for the start of each line. For example:
///
/// ```rust
/// let actual = index_lines("a\nb\n");
/// assert_eq!(actual, vec![0, 2, 4]);
/// ```
///
pub fn index_lines(text: &str) -> Vec<u32> {
let mut indexes = vec![0_u32];
for (i, c) in text.chars().enumerate() {
if c == '\n' {
indexes.push((i + 1) as u32);
}
}
indexes
}
enum IndexValid {
All,
UpTo(u32),
}
impl IndexValid {
fn covers(&self, line: u32) -> bool {
match *self {
IndexValid::UpTo(to) => to > line,
IndexValid::All => true,
}
}
}
fn to_range(line_index: &[u32], range: lsp_types::Range) -> Range<usize> {
let start =
(line_index[range.start.line as usize] + range.start.character) as usize;
let end =
(line_index[range.end.line as usize] + range.end.character) as usize;
Range { start, end }
}
pub fn to_position(line_index: &[u32], char_pos: u32) -> lsp_types::Position {
let mut line = 0_usize;
let mut line_start = 0_u32;
for (pos, v) in line_index.iter().enumerate() {
if char_pos < *v {
break;
}
line_start = *v;
line = pos;
}
lsp_types::Position {
line: line as u32,
character: char_pos - line_start,
}
}
pub fn to_char_pos(line_index: &[u32], position: lsp_types::Position) -> u32 {
if let Some(line_start) = line_index.get(position.line as usize) {
line_start + position.character
} else {
0_u32
}
}
/// Apply a vector of document changes to the supplied string.
pub fn apply_content_changes(
content: &mut String,
content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
) {
let mut line_index = index_lines(&content);
let mut index_valid = IndexValid::All;
for change in content_changes {
if let Some(range) = change.range {
if !index_valid.covers(range.start.line) {
line_index = index_lines(&content);
}
let range = to_range(&line_index, range);
content.replace_range(range, &change.text);
while left != right {
let mid = left + (right - left) / 2;
// SAFETY:
// When left < right, left <= mid < right.
// Therefore left always increases and right always decreases,
// and either of them is selected.
// In both cases left <= right is satisfied.
// Therefore if left < right in a step,
// left <= right is satisfied in the next step.
// Therefore as long as left != right, 0 <= left < right <= len is satisfied
// and if this case 0 <= mid < len is satisfied too.
let value = unsafe { slice.get_unchecked(mid) };
if predicate(value) {
left = mid + 1;
} else {
*content = change.text;
index_valid = IndexValid::UpTo(0);
right = mid;
}
}
left
}
/// A non-ASCII character recorded while building a [`LineIndex`].
/// `start`/`end` are UTF-8 byte columns measured from the start of the
/// character's line (see `LineIndex::new`, which pushes one of these for
/// every `!c.is_ascii()` char).
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Utf16Char {
  pub start: TextSize,
  pub end: TextSize,
}
impl Utf16Char {
  /// Width of this character in UTF-8 bytes.
  fn len(&self) -> TextSize {
    self.end - self.start
  }

  /// Width of this character in UTF-16 code units: a 4-byte UTF-8
  /// sequence encodes a code point above U+FFFF, which needs a surrogate
  /// pair; every other width fits in a single unit.
  fn len_utf16(&self) -> usize {
    let needs_surrogate_pair = self.len() == TextSize::from(4);
    if needs_surrogate_pair {
      2
    } else {
      1
    }
  }
}
/// Maps between line/column positions and flat offsets in a document,
/// tracking both UTF-8 and UTF-16 measurements so LSP (UTF-16) positions
/// can be converted to Rust (UTF-8) offsets and back.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct LineIndex {
  // UTF-8 byte offset of the start of each line.
  utf8_offsets: Vec<TextSize>,
  // For each line that contains non-ASCII characters: those characters,
  // keyed by line number. Lines that are pure ASCII have no entry.
  utf16_lines: HashMap<u32, Vec<Utf16Char>>,
  // UTF-16 code-unit offset of the start of each line.
  utf16_offsets: Vec<TextSize>,
}
impl LineIndex {
  /// Build the index in a single pass over `text`, tracking three running
  /// totals: the UTF-8 offset (`curr_row`), the UTF-16 offset
  /// (`curr_offset_u16`), and the UTF-8 column within the current line
  /// (`curr_col`). Non-ASCII characters are recorded per line so UTF-16
  /// columns can later be converted to UTF-8 columns.
  pub fn new(text: &str) -> LineIndex {
    let mut utf16_lines = HashMap::new();
    let mut utf16_chars = Vec::new();
    // Line 0 always starts at offset 0 in both encodings.
    let mut utf8_offsets = vec![0.into()];
    let mut utf16_offsets = vec![0.into()];
    let mut curr_row = 0.into();
    let mut curr_col = 0.into();
    let mut curr_offset_u16 = 0.into();
    let mut line = 0;
    for c in text.chars() {
      let c_len = TextSize::of(c);
      curr_row += c_len;
      curr_offset_u16 += TextSize::from(c.len_utf16() as u32);
      if c == '\n' {
        // Record the start of the next line (the '\n' itself is counted
        // into the totals above before pushing).
        utf8_offsets.push(curr_row);
        utf16_offsets.push(curr_offset_u16);
        // Flush the wide characters collected for the finished line.
        if !utf16_chars.is_empty() {
          utf16_lines.insert(line, utf16_chars);
          utf16_chars = Vec::new();
        }
        curr_col = 0.into();
        line += 1;
        continue;
      }
      // Only non-ASCII chars can differ between the encodings, so only
      // they need to be remembered for column conversion.
      if !c.is_ascii() {
        utf16_chars.push(Utf16Char {
          start: curr_col,
          end: curr_col + c_len,
        });
      }
      curr_col += c_len;
    }
    // Flush the final line, which has no trailing '\n'.
    if !utf16_chars.is_empty() {
      utf16_lines.insert(line, utf16_chars);
    }
    LineIndex {
      utf8_offsets,
      utf16_lines,
      utf16_offsets,
    }
  }
  /// Convert a u16 based range to a u8 TextRange.
  ///
  /// Errors if either endpoint's line is out of range (see [`Self::offset`]).
  pub fn get_text_range(
    &self,
    range: lsp_types::Range,
  ) -> Result<TextRange, AnyError> {
    let start = self.offset(range.start)?;
    let end = self.offset(range.end)?;
    Ok(TextRange::new(start, end))
  }
  /// Return a u8 offset based on a u16 position.
  ///
  /// Only the line number is bounds-checked; a character column past the
  /// end of the line is not detected here.
  pub fn offset(
    &self,
    position: lsp_types::Position,
  ) -> Result<TextSize, AnyError> {
    let col = self.utf16_to_utf8_col(position.line, position.character);
    if let Some(line_offset) = self.utf8_offsets.get(position.line as usize) {
      Ok(line_offset + col)
    } else {
      Err(custom_error("OutOfRange", "The position is out of range."))
    }
  }
  /// Convert an lsp Position into a tsc/TypeScript "position", which is really
  /// an u16 byte offset from the start of the string represented as an u32.
  pub fn offset_tsc(
    &self,
    position: lsp_types::Position,
  ) -> jsonrpc::Result<u32> {
    self
      .offset_utf16(position)
      .map(|ts| ts.into())
      .map_err(|err| jsonrpc::Error::invalid_params(err.to_string()))
  }
  // UTF-16 flat offset for `position`: line start offset plus the
  // character column (LSP columns are already UTF-16 code units).
  fn offset_utf16(
    &self,
    position: lsp_types::Position,
  ) -> Result<TextSize, AnyError> {
    if let Some(line_offset) = self.utf16_offsets.get(position.line as usize) {
      Ok(line_offset + TextSize::from(position.character))
    } else {
      Err(custom_error("OutOfRange", "The position is out of range."))
    }
  }
  /// Returns a u16 position based on a u16 offset, which TypeScript offsets are
  /// returned as u16.
  pub fn position_tsc(&self, offset: TextSize) -> lsp_types::Position {
    // Last line whose start offset is <= `offset`.
    let line = partition_point(&self.utf16_offsets, |&it| it <= offset) - 1;
    let line_start_offset = self.utf16_offsets[line];
    let col = offset - line_start_offset;
    lsp_types::Position {
      line: line as u32,
      character: col.into(),
    }
  }
  /// Returns a u16 position based on a u8 offset.
  ///
  /// NOTE(review): the returned `character` is the UTF-8 column of the
  /// offset within its line (no utf8→utf16 column conversion is applied
  /// here) — confirm callers only rely on this for ASCII content.
  pub fn position_utf16(&self, offset: TextSize) -> lsp_types::Position {
    let line = partition_point(&self.utf8_offsets, |&it| it <= offset) - 1;
    let line_start_offset = self.utf8_offsets[line];
    let col = offset - line_start_offset;
    lsp_types::Position {
      line: line as u32,
      character: col.into(),
    }
  }
  // Widen a UTF-16 column on `line` to a UTF-8 column by adding, for each
  // recorded wide character that starts before `col`, the difference
  // between its UTF-8 and UTF-16 widths. Lines with no entry in
  // `utf16_lines` are pure ASCII, so the column passes through unchanged.
  fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
    if let Some(utf16_chars) = self.utf16_lines.get(&line) {
      for c in utf16_chars {
        if col > u32::from(c.start) {
          col += u32::from(c.len()) - c.len_utf16() as u32;
        } else {
          // Chars are recorded in line order, so the rest start even later.
          break;
        }
      }
    }
    col.into()
  }
}
/// Compare two strings and return a vector of text edit records which are
/// supported by the Language Server Protocol.
pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
if a == b {
return vec![];
}
let chunks = diff(a, b);
let mut text_edits = Vec::<TextEdit>::new();
let line_index = index_lines(a);
let line_index = LineIndex::new(a);
let mut iter = chunks.iter().peekable();
let mut a_pos = 0_u32;
let mut a_pos = TextSize::from(0);
loop {
let chunk = iter.next();
match chunk {
None => break,
Some(Chunk::Equal(e)) => {
a_pos += e.chars().count() as u32;
a_pos += TextSize::from(e.encode_utf16().count() as u32);
}
Some(Chunk::Delete(d)) => {
let start = to_position(&line_index, a_pos);
a_pos += d.chars().count() as u32;
let end = to_position(&line_index, a_pos);
let start = line_index.position_utf16(a_pos);
a_pos += TextSize::from(d.encode_utf16().count() as u32);
let end = line_index.position_utf16(a_pos);
let range = lsp_types::Range { start, end };
match iter.peek() {
Some(Chunk::Insert(i)) => {
@ -134,7 +244,7 @@ pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
}
}
Some(Chunk::Insert(i)) => {
let pos = to_position(&line_index, a_pos);
let pos = line_index.position_utf16(a_pos);
let range = lsp_types::Range {
start: pos,
end: pos,
@ -153,6 +263,9 @@ pub fn get_edits(a: &str, b: &str) -> Vec<TextEdit> {
/// Convert a difference between two strings into a change range used by the
/// TypeScript Language Service.
pub fn get_range_change(a: &str, b: &str) -> Value {
if a == b {
return json!(null);
}
let chunks = diff(a, b);
let mut iter = chunks.iter().peekable();
let mut started = false;
@ -162,12 +275,12 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
let mut equal = 0;
let mut a_pos = 0;
loop {
let chunk = iter.next();
match chunk {
let diff = iter.next();
match diff {
None => break,
Some(Chunk::Equal(e)) => {
a_pos += e.chars().count();
equal += e.chars().count();
a_pos += e.encode_utf16().count();
equal += e.encode_utf16().count();
}
Some(Chunk::Delete(d)) => {
if !started {
@ -175,7 +288,7 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
started = true;
equal = 0;
}
a_pos += d.chars().count();
a_pos += d.encode_utf16().count();
if started {
end = a_pos;
new_length += equal;
@ -191,7 +304,7 @@ pub fn get_range_change(a: &str, b: &str) -> Value {
} else {
end += equal;
}
new_length += i.chars().count() + equal;
new_length += i.encode_utf16().count() + equal;
equal = 0;
}
}
@ -215,7 +328,7 @@ pub fn slice(s: &str, range: impl RangeBounds<usize>) -> &str {
let len = match range.end_bound() {
Bound::Included(bound) => *bound + 1,
Bound::Excluded(bound) => *bound,
Bound::Unbounded => s.len(),
Bound::Unbounded => s.encode_utf16().count(),
} - start;
substring(s, start, start + len)
}
@ -231,7 +344,7 @@ pub fn substring(s: &str, start: usize, end: usize) -> &str {
break;
}
if let Some(c) = it.next() {
char_pos += 1;
char_pos += c.len_utf16();
byte_start += c.len_utf8();
} else {
break;
@ -244,7 +357,7 @@ pub fn substring(s: &str, start: usize, end: usize) -> &str {
break;
}
if let Some(c) = it.next() {
char_pos += 1;
char_pos += c.len_utf16();
byte_end += c.len_utf8();
} else {
break;
@ -258,24 +371,194 @@ mod tests {
use super::*;
#[test]
fn test_apply_content_changes() {
let mut content = "a\nb\nc\nd".to_string();
let content_changes = vec![lsp_types::TextDocumentContentChangeEvent {
range: Some(lsp_types::Range {
start: lsp_types::Position {
line: 1,
character: 0,
},
end: lsp_types::Position {
line: 1,
character: 1,
},
}),
range_length: Some(1),
text: "e".to_string(),
}];
apply_content_changes(&mut content, content_changes);
assert_eq!(content, "a\ne\nc\nd");
fn test_line_index() {
let text = "hello\nworld";
let index = LineIndex::new(text);
assert_eq!(
index.position_utf16(0.into()),
lsp_types::Position {
line: 0,
character: 0
}
);
assert_eq!(
index.position_utf16(1.into()),
lsp_types::Position {
line: 0,
character: 1
}
);
assert_eq!(
index.position_utf16(5.into()),
lsp_types::Position {
line: 0,
character: 5
}
);
assert_eq!(
index.position_utf16(6.into()),
lsp_types::Position {
line: 1,
character: 0
}
);
assert_eq!(
index.position_utf16(7.into()),
lsp_types::Position {
line: 1,
character: 1
}
);
assert_eq!(
index.position_utf16(8.into()),
lsp_types::Position {
line: 1,
character: 2
}
);
assert_eq!(
index.position_utf16(10.into()),
lsp_types::Position {
line: 1,
character: 4
}
);
assert_eq!(
index.position_utf16(11.into()),
lsp_types::Position {
line: 1,
character: 5
}
);
assert_eq!(
index.position_utf16(12.into()),
lsp_types::Position {
line: 1,
character: 6
}
);
let text = "\nhello\nworld";
let index = LineIndex::new(text);
assert_eq!(
index.position_utf16(0.into()),
lsp_types::Position {
line: 0,
character: 0
}
);
assert_eq!(
index.position_utf16(1.into()),
lsp_types::Position {
line: 1,
character: 0
}
);
assert_eq!(
index.position_utf16(2.into()),
lsp_types::Position {
line: 1,
character: 1
}
);
assert_eq!(
index.position_utf16(6.into()),
lsp_types::Position {
line: 1,
character: 5
}
);
assert_eq!(
index.position_utf16(7.into()),
lsp_types::Position {
line: 2,
character: 0
}
);
}
#[test]
fn test_char_len() {
assert_eq!('メ'.len_utf8(), 3);
assert_eq!('メ'.len_utf16(), 1);
assert_eq!('编'.len_utf8(), 3);
assert_eq!('编'.len_utf16(), 1);
assert_eq!('🦕'.len_utf8(), 4);
assert_eq!('🦕'.len_utf16(), 2);
}
#[test]
fn test_empty_index() {
let col_index = LineIndex::new(
"
const C: char = 'x';
",
);
assert_eq!(col_index.utf16_lines.len(), 0);
}
#[test]
fn test_single_char() {
let col_index = LineIndex::new(
"
const C: char = 'メ';
",
);
assert_eq!(col_index.utf16_lines.len(), 1);
assert_eq!(col_index.utf16_lines[&1].len(), 1);
assert_eq!(
col_index.utf16_lines[&1][0],
Utf16Char {
start: 17.into(),
end: 20.into()
}
);
// UTF-16 to UTF-8, no changes
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
// UTF-16 to UTF-8
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
let col_index = LineIndex::new("a𐐏b");
assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
}
#[test]
fn test_string() {
let col_index = LineIndex::new(
"
const C: char = \"メ メ\";
",
);
assert_eq!(col_index.utf16_lines.len(), 1);
assert_eq!(col_index.utf16_lines[&1].len(), 2);
assert_eq!(
col_index.utf16_lines[&1][0],
Utf16Char {
start: 17.into(),
end: 20.into()
}
);
assert_eq!(
col_index.utf16_lines[&1][1],
Utf16Char {
start: 21.into(),
end: 24.into()
}
);
// UTF-16 to UTF-8
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
// メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
}
#[test]
@ -318,6 +601,11 @@ mod tests {
#[test]
fn test_get_range_change() {
let a = "abcdefg";
let b = "abcdefg";
let actual = get_range_change(a, b);
assert_eq!(actual, json!(null));
let a = "abcdefg";
let b = "abedcfg";
let actual = get_range_change(a, b);
@ -401,108 +689,56 @@ mod tests {
"newLength": 3
})
);
}
#[test]
fn test_index_lines() {
let actual = index_lines("a\nb\r\nc");
assert_eq!(actual, vec![0, 2, 5]);
}
let a = "hello 🦕!";
let b = "hello deno!";
let actual = get_range_change(a, b);
assert_eq!(
actual,
json!({
"span": {
"start": 6,
"length": 2,
},
"newLength": 4
})
);
#[test]
fn test_to_position() {
let line_index = index_lines("a\nb\r\nc\n");
let a = "hello deno!";
let b = "hello deno🦕!";
let actual = get_range_change(a, b);
assert_eq!(
to_position(&line_index, 6),
lsp_types::Position {
line: 2,
character: 1,
}
actual,
json!({
"span": {
"start": 10,
"length": 0,
},
"newLength": 2
})
);
assert_eq!(
to_position(&line_index, 0),
lsp_types::Position {
line: 0,
character: 0,
}
);
assert_eq!(
to_position(&line_index, 3),
lsp_types::Position {
line: 1,
character: 1,
}
);
}
#[test]
fn test_to_position_mbc() {
let line_index = index_lines("\n😱🦕\n🤯\n");
assert_eq!(
to_position(&line_index, 0),
lsp_types::Position {
line: 0,
character: 0,
}
);
assert_eq!(
to_position(&line_index, 2),
lsp_types::Position {
line: 0,
character: 2,
}
);
assert_eq!(
to_position(&line_index, 3),
lsp_types::Position {
line: 1,
character: 0,
}
);
assert_eq!(
to_position(&line_index, 4),
lsp_types::Position {
line: 1,
character: 1,
}
);
assert_eq!(
to_position(&line_index, 5),
lsp_types::Position {
line: 1,
character: 2,
}
);
assert_eq!(
to_position(&line_index, 6),
lsp_types::Position {
line: 2,
character: 0,
}
);
assert_eq!(
to_position(&line_index, 7),
lsp_types::Position {
line: 2,
character: 1,
}
);
assert_eq!(
to_position(&line_index, 8),
lsp_types::Position {
line: 3,
character: 0,
}
);
// TODO(@kitsonk): https://github.com/dtolnay/dissimilar/issues/5
// let a = r#" 🦕🇺🇸👍 "#;
// let b = r#" 🇺🇸👍 "#;
// let actual = get_range_change(a, b);
// assert_eq!(
// actual,
// json!({
// "span": {
// "start": 1,
// "length": 2,
// },
// "newLength": 0
// })
// );
}
#[test]
fn test_substring() {
assert_eq!(substring("Deno", 1, 3), "en");
assert_eq!(substring("y̆y̆", 2, 4), "");
// this doesn't work like JavaScript, as 🦕 is treated as a single char in
// Rust, but as two chars in JavaScript.
// assert_eq!(substring("🦕🦕", 2, 4), "🦕");
assert_eq!(substring("🦕🦕", 2, 4), "🦕");
}
#[test]
@ -511,5 +747,6 @@ mod tests {
assert_eq!(slice("Deno", 1..=3), "eno");
assert_eq!(slice("Deno Land", 1..), "eno Land");
assert_eq!(slice("Deno", ..3), "Den");
assert_eq!(slice("Hello 🦕", 6..8), "🦕");
}
}

View file

@ -3,6 +3,7 @@
use super::analysis::ResolvedDependency;
use super::language_server::StateSnapshot;
use super::text;
use super::text::LineIndex;
use super::utils;
use crate::media_type::MediaType;
@ -32,6 +33,7 @@ use regex::Regex;
use std::borrow::Cow;
use std::collections::HashMap;
use std::thread;
use text_size::TextSize;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
@ -80,6 +82,14 @@ impl TsServer {
}
}
/// An lsp representation of an asset in memory, that has either been retrieved
/// from static assets built into Rust, or static assets built into tsc.
#[derive(Debug, Clone)]
pub struct AssetDocument {
  // Full source text of the asset.
  pub text: String,
  // Line index computed from `text`, for position/offset conversions.
  pub line_index: LineIndex,
}
/// Optionally returns an internal asset, first checking for any static assets
/// in Rust, then checking any previously retrieved static assets from the
/// isolate, and then finally, the tsc isolate itself.
@ -87,28 +97,41 @@ pub async fn get_asset(
specifier: &ModuleSpecifier,
ts_server: &TsServer,
state_snapshot: &StateSnapshot,
) -> Result<Option<String>, AnyError> {
) -> Result<Option<AssetDocument>, AnyError> {
let specifier_str = specifier.to_string().replace("asset:///", "");
if let Some(asset_text) = tsc::get_asset(&specifier_str) {
Ok(Some(asset_text.to_string()))
if let Some(text) = tsc::get_asset(&specifier_str) {
let maybe_asset = Some(AssetDocument {
line_index: LineIndex::new(text),
text: text.to_string(),
});
state_snapshot
.assets
.lock()
.unwrap()
.insert(specifier.clone(), maybe_asset.clone());
Ok(maybe_asset)
} else {
{
let assets = state_snapshot.assets.lock().unwrap();
if let Some(asset) = assets.get(specifier) {
return Ok(asset.clone());
}
}
let asset: Option<String> = serde_json::from_value(
ts_server
.request(
state_snapshot.clone(),
RequestMethod::GetAsset(specifier.clone()),
)
.await?,
)?;
let mut assets = state_snapshot.assets.lock().unwrap();
assets.insert(specifier.clone(), asset.clone());
Ok(asset)
let res = ts_server
.request(
state_snapshot.clone(),
RequestMethod::GetAsset(specifier.clone()),
)
.await?;
let maybe_text: Option<String> = serde_json::from_value(res)?;
let maybe_asset = if let Some(text) = maybe_text {
Some(AssetDocument {
line_index: LineIndex::new(&text),
text,
})
} else {
None
};
state_snapshot
.assets
.lock()
.unwrap()
.insert(specifier.clone(), maybe_asset.clone());
Ok(maybe_asset)
}
}
@ -342,10 +365,10 @@ pub struct TextSpan {
}
impl TextSpan {
pub fn to_range(&self, line_index: &[u32]) -> lsp_types::Range {
pub fn to_range(&self, line_index: &LineIndex) -> lsp_types::Range {
lsp_types::Range {
start: text::to_position(line_index, self.start),
end: text::to_position(line_index, self.start + self.length),
start: line_index.position_tsc(self.start.into()),
end: line_index.position_tsc(TextSize::from(self.start + self.length)),
}
}
}
@ -376,7 +399,7 @@ pub struct QuickInfo {
}
impl QuickInfo {
pub fn to_hover(&self, line_index: &[u32]) -> lsp_types::Hover {
pub fn to_hover(&self, line_index: &LineIndex) -> lsp_types::Hover {
let mut contents = Vec::<lsp_types::MarkedString>::new();
if let Some(display_string) =
display_parts_to_string(self.display_parts.clone())
@ -425,12 +448,12 @@ pub struct DocumentSpan {
impl DocumentSpan {
pub async fn to_link<F, Fut>(
&self,
line_index: &[u32],
line_index: &LineIndex,
index_provider: F,
) -> Option<lsp_types::LocationLink>
where
F: Fn(ModuleSpecifier) -> Fut,
Fut: Future<Output = Result<Vec<u32>, AnyError>>,
Fut: Future<Output = Result<LineIndex, AnyError>>,
{
let target_specifier =
ModuleSpecifier::resolve_url(&self.file_name).unwrap();
@ -486,15 +509,16 @@ pub struct RenameLocations {
}
impl RenameLocations {
pub async fn into_workspace_edit<F, Fut>(
pub async fn into_workspace_edit<F, Fut, V>(
self,
snapshot: StateSnapshot,
index_provider: F,
new_name: &str,
index_provider: F,
version_provider: V,
) -> Result<lsp_types::WorkspaceEdit, AnyError>
where
F: Fn(ModuleSpecifier) -> Fut,
Fut: Future<Output = Result<Vec<u32>, AnyError>>,
Fut: Future<Output = Result<LineIndex, AnyError>>,
V: Fn(ModuleSpecifier) -> Option<i32>,
{
let mut text_document_edit_map: HashMap<Url, lsp_types::TextDocumentEdit> =
HashMap::new();
@ -510,10 +534,7 @@ impl RenameLocations {
lsp_types::TextDocumentEdit {
text_document: lsp_types::OptionalVersionedTextDocumentIdentifier {
uri: uri.clone(),
version: snapshot
.doc_data
.get(&specifier)
.map_or_else(|| None, |data| data.version),
version: version_provider(specifier.clone()),
},
edits: Vec::<
lsp_types::OneOf<
@ -592,12 +613,12 @@ pub struct DefinitionInfoAndBoundSpan {
impl DefinitionInfoAndBoundSpan {
pub async fn to_definition<F, Fut>(
&self,
line_index: &[u32],
line_index: &LineIndex,
index_provider: F,
) -> Option<lsp_types::GotoDefinitionResponse>
where
F: Fn(ModuleSpecifier) -> Fut + Clone,
Fut: Future<Output = Result<Vec<u32>, AnyError>>,
Fut: Future<Output = Result<LineIndex, AnyError>>,
{
if let Some(definitions) = &self.definitions {
let mut location_links = Vec::<lsp_types::LocationLink>::new();
@ -627,7 +648,7 @@ pub struct DocumentHighlights {
impl DocumentHighlights {
pub fn to_highlight(
&self,
line_index: &[u32],
line_index: &LineIndex,
) -> Vec<lsp_types::DocumentHighlight> {
self
.highlight_spans
@ -656,7 +677,7 @@ pub struct ReferenceEntry {
}
impl ReferenceEntry {
pub fn to_location(&self, line_index: &[u32]) -> lsp_types::Location {
pub fn to_location(&self, line_index: &LineIndex) -> lsp_types::Location {
let uri =
utils::normalize_file_name(&self.document_span.file_name).unwrap();
lsp_types::Location {
@ -676,7 +697,7 @@ pub struct CompletionInfo {
impl CompletionInfo {
pub fn into_completion_response(
self,
line_index: &[u32],
line_index: &LineIndex,
) -> lsp_types::CompletionResponse {
let items = self
.entries
@ -704,7 +725,7 @@ pub struct CompletionEntry {
impl CompletionEntry {
pub fn into_completion_item(
self,
line_index: &[u32],
line_index: &LineIndex,
) -> lsp_types::CompletionItem {
let mut item = lsp_types::CompletionItem {
label: self.name,
@ -801,11 +822,13 @@ fn cache_snapshot(
.contains_key(&(specifier.clone().into(), version.clone().into()))
{
let s = ModuleSpecifier::resolve_url(&specifier)?;
let content = {
let file_cache = state.state_snapshot.file_cache.lock().unwrap();
let file_id = file_cache.lookup(&s).unwrap();
file_cache.get_contents(file_id)?
};
let content = state
.state_snapshot
.documents
.lock()
.unwrap()
.content(&s)?
.unwrap();
state
.snapshots
.insert((specifier.into(), version.into()), content);
@ -873,7 +896,7 @@ fn get_change_range(state: &mut State, args: Value) -> Result<Value, AnyError> {
"start": 0,
"length": v.old_length,
},
"newLength": current.chars().count(),
"newLength": current.encode_utf16().count(),
}))
}
} else {
@ -890,16 +913,22 @@ fn get_change_range(state: &mut State, args: Value) -> Result<Value, AnyError> {
fn get_length(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: SourceSnapshotArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
if state.state_snapshot.doc_data.contains_key(&specifier) {
if state
.state_snapshot
.documents
.lock()
.unwrap()
.contains(&specifier)
{
cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
let content = state
.snapshots
.get(&(v.specifier.into(), v.version.into()))
.unwrap();
Ok(json!(content.chars().count()))
Ok(json!(content.encode_utf16().count()))
} else {
let mut sources = state.state_snapshot.sources.lock().unwrap();
Ok(json!(sources.get_length(&specifier).unwrap()))
Ok(json!(sources.get_length_utf16(&specifier).unwrap()))
}
}
@ -915,7 +944,13 @@ struct GetTextArgs {
fn get_text(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: GetTextArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
let content = if state.state_snapshot.doc_data.contains_key(&specifier) {
let content = if state
.state_snapshot
.documents
.lock()
.unwrap()
.contains(&specifier)
{
cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
state
.snapshots
@ -939,8 +974,9 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
return Err(custom_error("Deadlock", "deadlock locking sources"));
};
if let Some(doc_data) = state.state_snapshot.doc_data.get(&referrer) {
if let Some(dependencies) = &doc_data.dependencies {
let documents = state.state_snapshot.documents.lock().unwrap();
if documents.contains(&referrer) {
if let Some(dependencies) = documents.dependencies(&referrer) {
for specifier in &v.specifiers {
if specifier.starts_with("asset:///") {
resolved.push(Some((
@ -959,10 +995,7 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
if let ResolvedDependency::Resolved(resolved_specifier) =
resolved_import
{
if state
.state_snapshot
.doc_data
.contains_key(&resolved_specifier)
if documents.contains(&resolved_specifier)
|| sources.contains(&resolved_specifier)
{
let media_type = if let Some(media_type) =
@ -1001,7 +1034,10 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
} else {
return Err(custom_error(
"NotFound",
"the referring specifier is unexpectedly missing",
format!(
"the referring ({}) specifier is unexpectedly missing",
referrer
),
));
}
@ -1014,8 +1050,8 @@ fn respond(state: &mut State, args: Value) -> Result<Value, AnyError> {
}
fn script_names(state: &mut State, _args: Value) -> Result<Value, AnyError> {
let script_names: Vec<&ModuleSpecifier> =
state.state_snapshot.doc_data.keys().collect();
let documents = state.state_snapshot.documents.lock().unwrap();
let script_names = documents.open_specifiers();
Ok(json!(script_names))
}
@ -1028,11 +1064,14 @@ struct ScriptVersionArgs {
fn script_version(state: &mut State, args: Value) -> Result<Value, AnyError> {
let v: ScriptVersionArgs = serde_json::from_value(args)?;
let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
let maybe_doc_data = state.state_snapshot.doc_data.get(&specifier);
if let Some(doc_data) = maybe_doc_data {
if let Some(version) = doc_data.version {
return Ok(json!(version.to_string()));
}
if let Some(version) = state
.state_snapshot
.documents
.lock()
.unwrap()
.version(&specifier)
{
return Ok(json!(version.to_string()));
} else {
let mut sources = state.state_snapshot.sources.lock().unwrap();
if let Some(version) = sources.get_script_version(&specifier) {
@ -1153,13 +1192,14 @@ pub struct UserPreferences {
}
/// Methods that are supported by the Language Service in the compiler isolate.
#[derive(Debug)]
pub enum RequestMethod {
/// Configure the compilation settings for the server.
Configure(TsConfig),
/// Retrieve the text of an assets that exists in memory in the isolate.
GetAsset(ModuleSpecifier),
/// Return diagnostics for given file.
GetDiagnostics(ModuleSpecifier),
GetDiagnostics(Vec<ModuleSpecifier>),
/// Return quick info at position (hover information).
GetQuickInfo((ModuleSpecifier, u32)),
/// Return document highlights at position.
@ -1189,10 +1229,10 @@ impl RequestMethod {
"method": "getAsset",
"specifier": specifier,
}),
RequestMethod::GetDiagnostics(specifier) => json!({
RequestMethod::GetDiagnostics(specifiers) => json!({
"id": id,
"method": "getDiagnostics",
"specifier": specifier,
"specifiers": specifiers,
}),
RequestMethod::GetQuickInfo((specifier, position)) => json!({
"id": id,
@ -1294,30 +1334,21 @@ pub fn request(
#[cfg(test)]
mod tests {
use super::super::memory_cache::MemoryCache;
use super::*;
use crate::lsp::language_server::DocumentData;
use std::collections::HashMap;
use crate::lsp::documents::DocumentCache;
use std::sync::Arc;
use std::sync::Mutex;
fn mock_state_snapshot(sources: Vec<(&str, &str, i32)>) -> StateSnapshot {
let mut doc_data = HashMap::new();
let mut file_cache = MemoryCache::default();
let mut documents = DocumentCache::default();
for (specifier, content, version) in sources {
let specifier = ModuleSpecifier::resolve_url(specifier)
.expect("failed to create specifier");
doc_data.insert(
specifier.clone(),
DocumentData::new(specifier.clone(), version, content, None),
);
file_cache.set_contents(specifier, Some(content.as_bytes().to_vec()));
documents.open(specifier, version, content.to_string());
}
let file_cache = Arc::new(Mutex::new(file_cache));
StateSnapshot {
assets: Default::default(),
doc_data,
file_cache,
documents: Arc::new(Mutex::new(documents)),
sources: Default::default(),
}
}
@ -1413,29 +1444,31 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
json!([
{
"start": {
"line": 0,
"character": 0,
},
"end": {
"line": 0,
"character": 7
},
"fileName": "file:///a.ts",
"messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the `lib` compiler option to include 'dom'.",
"sourceLine": "console.log(\"hello deno\");",
"category": 1,
"code": 2584
}
])
json!({
"file:///a.ts": [
{
"start": {
"line": 0,
"character": 0,
},
"end": {
"line": 0,
"character": 7
},
"fileName": "file:///a.ts",
"messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the `lib` compiler option to include 'dom'.",
"sourceLine": "console.log(\"hello deno\");",
"category": 1,
"code": 2584
}
]
})
);
}
@ -1466,11 +1499,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(response, json!([]));
assert_eq!(response, json!({ "file:///a.ts": [] }));
}
#[test]
@ -1496,28 +1529,30 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
json!([{
"start": {
"line": 1,
"character": 8
},
"end": {
"line": 1,
"character": 30
},
"fileName": "file:///a.ts",
"messageText": "\'A\' is declared but its value is never read.",
"sourceLine": " import { A } from \".\";",
"category": 2,
"code": 6133,
"reportsUnnecessary": true,
}])
json!({
"file:///a.ts": [{
"start": {
"line": 1,
"character": 8
},
"end": {
"line": 1,
"character": 30
},
"fileName": "file:///a.ts",
"messageText": "\'A\' is declared but its value is never read.",
"sourceLine": " import { A } from \".\";",
"category": 2,
"code": 6133,
"reportsUnnecessary": true,
}]
})
);
}
@ -1548,11 +1583,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(response, json!([]));
assert_eq!(response, json!({ "file:///a.ts": [] }));
}
#[test]
@ -1585,42 +1620,44 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(
response,
json!([{
"start": {
"line": 1,
"character": 8
},
"end": {
"line": 6,
"character": 55,
},
"fileName": "file:///a.ts",
"messageText": "All imports in import declaration are unused.",
"sourceLine": " import {",
"category": 2,
"code": 6192,
"reportsUnnecessary": true
}, {
"start": {
"line": 8,
"character": 29
},
"end": {
"line": 8,
"character": 29
},
"fileName": "file:///a.ts",
"messageText": "Expression expected.",
"sourceLine": " import * as test from",
"category": 1,
"code": 1109
}])
json!({
"file:///a.ts": [{
"start": {
"line": 1,
"character": 8
},
"end": {
"line": 6,
"character": 55,
},
"fileName": "file:///a.ts",
"messageText": "All imports in import declaration are unused.",
"sourceLine": " import {",
"category": 2,
"code": 6192,
"reportsUnnecessary": true
}, {
"start": {
"line": 8,
"character": 29
},
"end": {
"line": 8,
"character": 29
},
"fileName": "file:///a.ts",
"messageText": "Expression expected.",
"sourceLine": " import * as test from",
"category": 1,
"code": 1109
}]
})
);
}
@ -1641,11 +1678,11 @@ mod tests {
let result = request(
&mut runtime,
state_snapshot,
RequestMethod::GetDiagnostics(specifier),
RequestMethod::GetDiagnostics(vec![specifier]),
);
assert!(result.is_ok());
let response = result.unwrap();
assert_eq!(response, json!([]));
assert_eq!(response, json!({}));
}
#[test]

View file

@ -0,0 +1,25 @@
{
"jsonrpc": "2.0",
"method": "textDocument/didChange",
"params": {
"textDocument": {
"uri": "file:///a/file.ts",
"version": 2
},
"contentChanges": [
{
"range": {
"start": {
"line": 444,
"character": 11
},
"end": {
"line": 444,
"character": 14
}
},
"text": "+++"
}
]
}
}

View file

@ -0,0 +1,25 @@
{
"jsonrpc": "2.0",
"method": "textDocument/didChange",
"params": {
"textDocument": {
"uri": "file:///a/file.ts",
"version": 2
},
"contentChanges": [
{
"range": {
"start": {
"line": 1,
"character": 11
},
"end": {
"line": 1,
"character": 13
}
},
"text": ""
}
]
}
}

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,12 @@
{
"jsonrpc": "2.0",
"method": "textDocument/didOpen",
"params": {
"textDocument": {
"uri": "file:///a/file.ts",
"languageId": "typescript",
"version": 1,
"text": "const a = `编写软件很难`;\nconst b = `👍🦕😃`;\nconsole.log(a, b);\n"
}
}
}

View file

@ -0,0 +1,14 @@
{
"jsonrpc": "2.0",
"id": 2,
"method": "textDocument/hover",
"params": {
"textDocument": {
"uri": "file:///a/file.ts"
},
"position": {
"line": 2,
"character": 14
}
}
}

View file

@ -7,8 +7,8 @@
"uri": "file:///a/file.ts"
},
"position": {
"line": 5,
"character": 19
"line": 0,
"character": 4
},
"newName": "variable_modified"
}

View file

@ -511,15 +511,23 @@ delete Object.prototype.__proto__;
}
case "getDiagnostics": {
try {
const diagnostics = [
...languageService.getSemanticDiagnostics(request.specifier),
...languageService.getSuggestionDiagnostics(request.specifier),
...languageService.getSyntacticDiagnostics(request.specifier),
].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code));
return respond(id, fromTypeScriptDiagnostic(diagnostics));
/** @type {Record<string, any[]>} */
const diagnosticMap = {};
for (const specifier of request.specifiers) {
diagnosticMap[specifier] = fromTypeScriptDiagnostic([
...languageService.getSemanticDiagnostics(specifier),
...languageService.getSuggestionDiagnostics(specifier),
...languageService.getSyntacticDiagnostics(specifier),
].filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)));
}
return respond(id, diagnosticMap);
} catch (e) {
error(e);
return respond(id, []);
if ("stack" in e) {
error(e.stack);
} else {
error(e);
}
return respond(id, {});
}
}
case "getQuickInfo": {

View file

@ -70,7 +70,7 @@ declare global {
interface GetDiagnosticsRequest extends BaseLanguageServerRequest {
method: "getDiagnostics";
specifier: string;
specifiers: string[];
}
interface GetQuickInfoRequest extends BaseLanguageServerRequest {