Mirror of https://github.com/denoland/deno.git
fix(lsp): handle multiline semantic tokens (#23691)
commit 973743b751
parent d81e97f92f
4 changed files with 50 additions and 29 deletions
@@ -144,13 +144,6 @@ pub enum AssetOrDocument {
 }
 
 impl AssetOrDocument {
-  pub fn specifier(&self) -> &ModuleSpecifier {
-    match self {
-      AssetOrDocument::Asset(asset) => asset.specifier(),
-      AssetOrDocument::Document(doc) => doc.specifier(),
-    }
-  }
-
   pub fn document(&self) -> Option<&Arc<Document>> {
     match self {
       AssetOrDocument::Asset(_) => None,
@@ -204,10 +197,6 @@ impl AssetOrDocument {
   pub fn document_lsp_version(&self) -> Option<i32> {
     self.document().and_then(|d| d.maybe_lsp_version())
   }
-
-  pub fn is_open(&self) -> bool {
-    self.document().map(|d| d.is_open()).unwrap_or(false)
-  }
 }
 
 type ModuleResult = Result<deno_graph::JsModule, deno_graph::ModuleGraphError>;
@@ -2604,7 +2604,7 @@ impl Inner {
       .await?;
 
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensResult::Tokens(semantic_tokens))
     } else {
@@ -2642,7 +2642,7 @@ impl Inner {
       .await?;
 
     let semantic_tokens =
-      semantic_classification.to_semantic_tokens(&asset_or_doc, line_index)?;
+      semantic_classification.to_semantic_tokens(line_index)?;
     let response = if !semantic_tokens.data.is_empty() {
       Some(SemanticTokensRangeResult::Tokens(semantic_tokens))
     } else {
@@ -185,6 +185,10 @@ impl LineIndex {
     }
   }
 
+  pub fn line_length_utf16(&self, line: u32) -> TextSize {
+    self.utf16_offsets[(line + 1) as usize] - self.utf16_offsets[line as usize]
+  }
+
   pub fn text_content_length_utf16(&self) -> TextSize {
     *self.utf16_offsets.last().unwrap()
   }
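Note on the new helper: `LineIndex` evidently keeps a table of per-line UTF-16 start offsets, so the length of a line, including its trailing newline, falls out as the difference between adjacent entries. A minimal standalone sketch of that idea, with a hand-rolled offset table standing in for Deno's actual `LineIndex` internals:

// Sketch: derive UTF-16 line lengths from a table of line-start offsets.
// `line_starts_utf16` is a hypothetical stand-in for whatever populates
// `LineIndex::utf16_offsets`; the subtraction mirrors the diff exactly.
fn line_starts_utf16(text: &str) -> Vec<u32> {
  let mut offsets = vec![0u32];
  let mut pos = 0u32;
  for c in text.chars() {
    pos += c.len_utf16() as u32;
    if c == '\n' {
      offsets.push(pos);
    }
  }
  if *offsets.last().unwrap() != pos {
    offsets.push(pos); // end-of-text sentinel when there is no final newline
  }
  offsets
}

fn line_length_utf16(offsets: &[u32], line: u32) -> u32 {
  // Includes the trailing '\n', which matters for the fix below: a token's
  // slice of a non-final line runs all the way to the line break.
  offsets[(line + 1) as usize] - offsets[line as usize]
}

fn main() {
  let offsets = line_starts_utf16("  to\nken  \n"); // the new test's input
  assert_eq!(line_length_utf16(&offsets, 0), 5); // "  to\n"
  assert_eq!(line_length_utf16(&offsets, 1), 6); // "ken  \n"
}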
@@ -2496,9 +2496,9 @@ pub struct Classifications {
 impl Classifications {
   pub fn to_semantic_tokens(
     &self,
-    asset_or_doc: &AssetOrDocument,
     line_index: Arc<LineIndex>,
   ) -> LspResult<lsp::SemanticTokens> {
+    // https://github.com/microsoft/vscode/blob/1.89.0/extensions/typescript-language-features/src/languageFeatures/semanticTokens.ts#L89-L115
     let token_count = self.spans.len() / 3;
     let mut builder = SemanticTokensBuilder::new();
     for i in 0..token_count {
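Two things worth noting in this hunk: `spans` comes back from the TypeScript service as flat triples of (offset, length, classification), hence `token_count = spans.len() / 3`; and, per the VS Code source linked in the added comment, the classification value packs the token type and the modifier bitset together as ((type + 1) << 8) | modifiers. A hedged decoding sketch (constant names are illustrative, not Deno's):

// Sketch: unpack a TypeScript semantic classification into the
// (token_type, token_modifiers) pair that `builder.push` receives.
// Assumes the VS Code encoding: ((token_type + 1) << 8) | modifier_bits.
const TYPE_OFFSET: u32 = 8;
const MODIFIER_MASK: u32 = (1 << TYPE_OFFSET) - 1;

fn decode_classification(classification: u32) -> (u32, u32) {
  let token_type = (classification >> TYPE_OFFSET) - 1;
  let token_modifiers = classification & MODIFIER_MASK;
  (token_type, token_modifiers)
}

fn main() {
  // 2057 is the classification used in the new test below:
  // 2057 >> 8 == 8, minus 1 gives token type 7; 2057 & 0xff gives modifiers 9.
  assert_eq!(decode_classification(2057), (7, 9));
}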
@@ -2517,25 +2517,24 @@ impl Classifications {
       let start_pos = line_index.position_tsc(offset.into());
       let end_pos = line_index.position_tsc(TextSize::from(offset + length));
 
-      if start_pos.line == end_pos.line
-        && start_pos.character <= end_pos.character
-      {
-        builder.push(
-          start_pos.line,
-          start_pos.character,
-          end_pos.character - start_pos.character,
-          token_type,
-          token_modifiers,
-        );
-      } else {
-        log::error!(
-          "unexpected positions\nspecifier: {}\nopen: {}\nstart_pos: {:?}\nend_pos: {:?}",
-          asset_or_doc.specifier(),
-          asset_or_doc.is_open(),
-          start_pos,
-          end_pos
-        );
-        return Err(LspError::internal_error());
-      }
+      for line in start_pos.line..(end_pos.line + 1) {
+        let start_character = if line == start_pos.line {
+          start_pos.character
+        } else {
+          0
+        };
+        let end_character = if line == end_pos.line {
+          end_pos.character
+        } else {
+          line_index.line_length_utf16(line).into()
+        };
+        builder.push(
+          line,
+          start_character,
+          end_character - start_character,
+          token_type,
+          token_modifiers,
+        );
+      }
     }
     Ok(builder.build(None))
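This is the core of the fix. The old code rejected any span whose start and end landed on different lines and returned an internal error, which is also why the `asset_or_doc` parameter, used only by that error log, could be dropped along with the `specifier()` and `is_open()` helpers above. The new loop emits one token per line touched: the first line starts at `start_pos.character`, the last ends at `end_pos.character`, and every other boundary is clamped to the line's full UTF-16 length. The same splitting logic over plain tuples, as a self-contained sketch with a hypothetical `line_len` callback in place of `line_length_utf16`:

// Sketch: split a (start, end) position range into one range per line.
// `line_len(line)` is assumed to return the line's UTF-16 length
// including its trailing newline, like the new `line_length_utf16`.
fn split_by_line(
  start: (u32, u32), // (line, character)
  end: (u32, u32),
  line_len: impl Fn(u32) -> u32,
) -> Vec<(u32, u32, u32)> {
  // Each entry is (line, start_character, length).
  let mut parts = Vec::new();
  for line in start.0..=end.0 {
    let start_character = if line == start.0 { start.1 } else { 0 };
    let end_character = if line == end.0 { end.1 } else { line_len(line) };
    parts.push((line, start_character, end_character - start_character));
  }
  parts
}

fn main() {
  // The "to\nken" span from the new test: starts at (0, 2), ends at (1, 3),
  // and line 0 is 5 UTF-16 units long ("  to\n").
  let parts = split_by_line((0, 2), (1, 3), |_| 5);
  assert_eq!(parts, vec![(0, 2, 3), (1, 0, 3)]);
}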
@@ -5867,6 +5866,35 @@ mod tests {
     );
   }
 
+  #[test]
+  fn test_classification_to_semantic_tokens_multiline_tokens() {
+    let line_index = Arc::new(LineIndex::new("  to\nken  \n"));
+    let classifications = Classifications {
+      spans: vec![2, 6, 2057],
+    };
+    let semantic_tokens =
+      classifications.to_semantic_tokens(line_index).unwrap();
+    assert_eq!(
+      &semantic_tokens.data,
+      &[
+        lsp::SemanticToken {
+          delta_line: 0,
+          delta_start: 2,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+        lsp::SemanticToken {
+          delta_line: 1,
+          delta_start: 0,
+          length: 3,
+          token_type: 7,
+          token_modifiers_bitset: 9,
+        },
+      ]
+    );
+  }
+
   #[tokio::test]
   async fn test_get_edits_for_file_rename() {
     let temp_dir = TempDir::new();
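For reading the expected values in this test: LSP semantic tokens are delta-encoded, so `delta_line` is relative to the previous token's line, and `delta_start` is relative to the previous token's start character only when both tokens share a line (otherwise it is an absolute column). A minimal sketch of that encoding over plain tuples, not the actual `SemanticTokensBuilder` API:

// Sketch: LSP delta-encoding of semantic tokens.
// Input tuples are absolute (line, start_character, length).
fn delta_encode(tokens: &[(u32, u32, u32)]) -> Vec<(u32, u32, u32)> {
  let (mut prev_line, mut prev_start) = (0, 0);
  tokens
    .iter()
    .map(|&(line, start, length)| {
      let delta_line = line - prev_line;
      // Start is relative to the previous token only on the same line.
      let delta_start = if delta_line == 0 { start - prev_start } else { start };
      prev_line = line;
      prev_start = start;
      (delta_line, delta_start, length)
    })
    .collect()
}

fn main() {
  // The two per-line halves of the split "to\nken" token from the test:
  // the deltas happen to match the absolute positions here.
  assert_eq!(
    delta_encode(&[(0, 2, 3), (1, 0, 3)]),
    vec![(0, 2, 3), (1, 0, 3)]
  );
  // Two tokens on one line: the second start becomes relative (6 - 2 == 4).
  assert_eq!(
    delta_encode(&[(0, 2, 3), (0, 6, 2)]),
    vec![(0, 2, 3), (0, 4, 2)]
  );
}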