From 973743b7511510f33d85be6e4e3111c6db31cecf Mon Sep 17 00:00:00 2001
From: Nayeem Rahman
Date: Sat, 4 May 2024 21:48:06 +0100
Subject: [PATCH] fix(lsp): handle multiline semantic tokens (#23691)

---
 cli/lsp/documents.rs       | 11 -------
 cli/lsp/language_server.rs |  4 +--
 cli/lsp/text.rs            |  4 +++
 cli/lsp/tsc.rs             | 60 ++++++++++++++++++++++++++++----------
 4 files changed, 50 insertions(+), 29 deletions(-)

diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index 71cc63f836..ee7adc83a9 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -144,13 +144,6 @@ pub enum AssetOrDocument {
 }
 
 impl AssetOrDocument {
-  pub fn specifier(&self) -> &ModuleSpecifier {
-    match self {
-      AssetOrDocument::Asset(asset) => asset.specifier(),
-      AssetOrDocument::Document(doc) => doc.specifier(),
-    }
-  }
-
   pub fn document(&self) -> Option<&Arc<Document>> {
     match self {
       AssetOrDocument::Asset(_) => None,
@@ -204,10 +197,6 @@ impl AssetOrDocument {
   pub fn document_lsp_version(&self) -> Option<i32> {
     self.document().and_then(|d| d.maybe_lsp_version())
   }
-
-  pub fn is_open(&self) -> bool {
-    self.document().map(|d| d.is_open()).unwrap_or(false)
-  }
 }
 
 type ModuleResult = Result;
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index 01c9ac02f6..367be2c3b3 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -2604,7 +2604,7 @@ impl Inner {
       .await?;
 
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensResult::Tokens(semantic_tokens))
     } else {
@@ -2642,7 +2642,7 @@ impl Inner {
       .await?;
 
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensRangeResult::Tokens(semantic_tokens))
     } else {
diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs
index 6a5f36fb0f..88f27915bb 100644
--- a/cli/lsp/text.rs
+++ b/cli/lsp/text.rs
@@ -185,6 +185,10 @@ impl LineIndex {
     }
   }
 
+  pub fn line_length_utf16(&self, line: u32) -> TextSize {
+    self.utf16_offsets[(line + 1) as usize] - self.utf16_offsets[line as usize]
+  }
+
   pub fn text_content_length_utf16(&self) -> TextSize {
     *self.utf16_offsets.last().unwrap()
   }
diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs
index 606e47d72c..0f7ec2b6cc 100644
--- a/cli/lsp/tsc.rs
+++ b/cli/lsp/tsc.rs
@@ -2496,9 +2496,9 @@ pub struct Classifications {
 impl Classifications {
   pub fn to_semantic_tokens(
     &self,
-    asset_or_doc: &AssetOrDocument,
     line_index: Arc<LineIndex>,
   ) -> LspResult<lsp::SemanticTokens> {
+    // https://github.com/microsoft/vscode/blob/1.89.0/extensions/typescript-language-features/src/languageFeatures/semanticTokens.ts#L89-L115
     let token_count = self.spans.len() / 3;
     let mut builder = SemanticTokensBuilder::new();
     for i in 0..token_count {
@@ -2517,25 +2517,24 @@ impl Classifications {
       let start_pos = line_index.position_tsc(offset.into());
       let end_pos = line_index.position_tsc(TextSize::from(offset + length));
 
-      if start_pos.line == end_pos.line
-        && start_pos.character <= end_pos.character
-      {
+      for line in start_pos.line..(end_pos.line + 1) {
+        let start_character = if line == start_pos.line {
+          start_pos.character
+        } else {
+          0
+        };
+        let end_character = if line == end_pos.line {
+          end_pos.character
+        } else {
+          line_index.line_length_utf16(line).into()
+        };
         builder.push(
-          start_pos.line,
-          start_pos.character,
-          end_pos.character - start_pos.character,
+          line,
+          start_character,
+          end_character - start_character,
           token_type,
           token_modifiers,
         );
-      } else {
-        log::error!(
-          "unexpected positions\nspecifier: {}\nopen: {}\nstart_pos: {:?}\nend_pos: {:?}",
-          asset_or_doc.specifier(),
-          asset_or_doc.is_open(),
-          start_pos,
-          end_pos
-        );
-        return Err(LspError::internal_error());
       }
     }
     Ok(builder.build(None))
@@ -5867,6 +5866,35 @@ mod tests {
     );
   }
 
+  #[test]
+  fn test_classification_to_semantic_tokens_multiline_tokens() {
+    let line_index = Arc::new(LineIndex::new("  to\nken \n"));
+    let classifications = Classifications {
+      spans: vec![2, 6, 2057],
+    };
+    let semantic_tokens =
+      classifications.to_semantic_tokens(line_index).unwrap();
+    assert_eq!(
+      &semantic_tokens.data,
+      &[
+        lsp::SemanticToken {
+          delta_line: 0,
+          delta_start: 2,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+        lsp::SemanticToken {
+          delta_line: 1,
+          delta_start: 0,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+      ]
+    );
+  }
+
   #[tokio::test]
   async fn test_get_edits_for_file_rename() {
     let temp_dir = TempDir::new();