
fix(lsp): handle multiline semantic tokens (#23691)

Nayeem Rahman, 2024-05-04 21:48:06 +01:00, committed by GitHub
commit 973743b751
parent d81e97f92f
4 changed files with 50 additions and 29 deletions
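
Summary: tsc can return classification spans that cross line boundaries, but to_semantic_tokens() previously rejected any multiline span as an internal error. This change splits such spans into one semantic token per covered line, following the approach of the VS Code TypeScript extension (linked in the new comment below). It adds a LineIndex::line_length_utf16() helper to measure the intermediate lines, and drops the now-unused asset_or_doc parameter together with the AssetOrDocument::specifier() and is_open() accessors that only served the deleted error log.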

cli/lsp/documents.rs

@@ -144,13 +144,6 @@ pub enum AssetOrDocument {
 }
 
 impl AssetOrDocument {
-  pub fn specifier(&self) -> &ModuleSpecifier {
-    match self {
-      AssetOrDocument::Asset(asset) => asset.specifier(),
-      AssetOrDocument::Document(doc) => doc.specifier(),
-    }
-  }
-
   pub fn document(&self) -> Option<&Arc<Document>> {
     match self {
       AssetOrDocument::Asset(_) => None,
@@ -204,10 +197,6 @@ impl AssetOrDocument {
   pub fn document_lsp_version(&self) -> Option<i32> {
     self.document().and_then(|d| d.maybe_lsp_version())
   }
-
-  pub fn is_open(&self) -> bool {
-    self.document().map(|d| d.is_open()).unwrap_or(false)
-  }
 }
 
 type ModuleResult = Result<deno_graph::JsModule, deno_graph::ModuleGraphError>;
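
Note: specifier() and is_open() can be removed because their only remaining caller was the log::error!() branch deleted from Classifications::to_semantic_tokens() in cli/lsp/tsc.rs below.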

cli/lsp/language_server.rs

@@ -2604,7 +2604,7 @@ impl Inner {
       .await?;
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensResult::Tokens(semantic_tokens))
     } else {
@@ -2642,7 +2642,7 @@ impl Inner {
       .await?;
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensRangeResult::Tokens(semantic_tokens))
     } else {

cli/lsp/text.rs

@@ -185,6 +185,10 @@ impl LineIndex {
     }
   }
 
+  pub fn line_length_utf16(&self, line: u32) -> TextSize {
+    self.utf16_offsets[(line + 1) as usize] - self.utf16_offsets[line as usize]
+  }
+
   pub fn text_content_length_utf16(&self) -> TextSize {
     *self.utf16_offsets.last().unwrap()
   }
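
The new helper derives a line's UTF-16 length from consecutive line-start offsets. A minimal standalone sketch of the idea (a hypothetical free function over a plain slice; the real method lives on LineIndex and returns TextSize):

fn line_length_utf16(utf16_offsets: &[u32], line: u32) -> u32 {
  // utf16_offsets[n] is the UTF-16 offset at which line n starts; a text
  // ending in '\n' gets a final empty line whose start equals the total
  // length, so consecutive starts differ by a line's length including its
  // terminator.
  utf16_offsets[(line + 1) as usize] - utf16_offsets[line as usize]
}

fn main() {
  // "  to\nken  \n" -> line starts at UTF-16 offsets 0, 5, and 11.
  let offsets = [0u32, 5, 11];
  assert_eq!(line_length_utf16(&offsets, 0), 5); // "  to\n"
  assert_eq!(line_length_utf16(&offsets, 1), 6); // "ken  \n"
}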

cli/lsp/tsc.rs

@@ -2496,9 +2496,9 @@ pub struct Classifications {
 impl Classifications {
   pub fn to_semantic_tokens(
     &self,
-    asset_or_doc: &AssetOrDocument,
     line_index: Arc<LineIndex>,
   ) -> LspResult<lsp::SemanticTokens> {
+    // https://github.com/microsoft/vscode/blob/1.89.0/extensions/typescript-language-features/src/languageFeatures/semanticTokens.ts#L89-L115
     let token_count = self.spans.len() / 3;
     let mut builder = SemanticTokensBuilder::new();
     for i in 0..token_count {
@@ -2517,25 +2517,24 @@ impl Classifications {
       let start_pos = line_index.position_tsc(offset.into());
       let end_pos = line_index.position_tsc(TextSize::from(offset + length));
-      if start_pos.line == end_pos.line
-        && start_pos.character <= end_pos.character
-      {
-        builder.push(
-          start_pos.line,
-          start_pos.character,
-          end_pos.character - start_pos.character,
-          token_type,
-          token_modifiers,
-        );
-      } else {
-        log::error!(
-          "unexpected positions\nspecifier: {}\nopen: {}\nstart_pos: {:?}\nend_pos: {:?}",
-          asset_or_doc.specifier(),
-          asset_or_doc.is_open(),
-          start_pos,
-          end_pos
-        );
-        return Err(LspError::internal_error());
-      }
+      for line in start_pos.line..(end_pos.line + 1) {
+        let start_character = if line == start_pos.line {
+          start_pos.character
+        } else {
+          0
+        };
+        let end_character = if line == end_pos.line {
+          end_pos.character
+        } else {
+          line_index.line_length_utf16(line).into()
+        };
+        builder.push(
+          line,
+          start_character,
+          end_character - start_character,
+          token_type,
+          token_modifiers,
+        );
+      }
     }
     Ok(builder.build(None))
   }
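
The rewritten loop walks every line the span covers: the first line starts at the span's start character, later lines start at column 0, and every line but the last extends to that line's full UTF-16 length. A standalone sketch of that splitting logic, using hypothetical Pos and line_len stand-ins for the real LineIndex machinery:

#[derive(Clone, Copy)]
struct Pos {
  line: u32,
  character: u32,
}

// Split one classified span into per-line (line, start, length) tokens,
// the same shape as the builder.push() calls above.
fn split_span(
  start: Pos,
  end: Pos,
  line_len: impl Fn(u32) -> u32,
) -> Vec<(u32, u32, u32)> {
  let mut tokens = Vec::new();
  for line in start.line..=end.line {
    let s = if line == start.line { start.character } else { 0 };
    let e = if line == end.line { end.character } else { line_len(line) };
    tokens.push((line, s, e - s));
  }
  tokens
}

fn main() {
  // A span from (0, 2) to (1, 3) over "  to\nken  \n" (line lengths 5
  // and 6) yields one token per covered line, matching the test below.
  let lens = [5u32, 6];
  let tokens = split_span(
    Pos { line: 0, character: 2 },
    Pos { line: 1, character: 3 },
    |line| lens[line as usize],
  );
  assert_eq!(tokens, vec![(0, 2, 3), (1, 0, 3)]);
}
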
@@ -5867,6 +5866,35 @@ mod tests {
     );
   }
 
+  #[test]
+  fn test_classification_to_semantic_tokens_multiline_tokens() {
+    let line_index = Arc::new(LineIndex::new("  to\nken  \n"));
+    let classifications = Classifications {
+      spans: vec![2, 6, 2057],
+    };
+    let semantic_tokens =
+      classifications.to_semantic_tokens(line_index).unwrap();
+    assert_eq!(
+      &semantic_tokens.data,
+      &[
+        lsp::SemanticToken {
+          delta_line: 0,
+          delta_start: 2,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+        lsp::SemanticToken {
+          delta_line: 1,
+          delta_start: 0,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+      ]
+    );
+  }
+
   #[tokio::test]
   async fn test_get_edits_for_file_rename() {
     let temp_dir = TempDir::new();
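
For reference, the test's spans triple is (start offset 2, length 6, tsc classification 2057), and the expected tokens are LSP delta-encoded: delta_line and delta_start of the second token are relative to the first. The classification value decodes to the asserted type/modifier pair, assuming the encoding the TypeScript language service uses (token type in the high bits, modifier bitset in the low byte):

fn main() {
  let classification: u32 = 2057; // 0x0809
  let token_type = (classification >> 8) - 1; // 8 - 1 = 7
  let token_modifiers = classification & 0xff; // 9
  assert_eq!((token_type, token_modifiers), (7, 9));
}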