feat(lsp): Implement textDocument/semanticTokens/full (#10233)

Co-authored-by: Kitson Kelly <me@kitsonkelly.com>
Jean Pierre 2021-04-19 20:26:36 -05:00 committed by GitHub
parent b6203cb465
commit 6d404ec54b
11 changed files with 648 additions and 3 deletions


@@ -18,6 +18,9 @@ use lspower::lsp::ImplementationProviderCapability;
use lspower::lsp::OneOf;
use lspower::lsp::SaveOptions;
use lspower::lsp::SelectionRangeProviderCapability;
use lspower::lsp::SemanticTokensFullOptions;
use lspower::lsp::SemanticTokensOptions;
use lspower::lsp::SemanticTokensServerCapabilities;
use lspower::lsp::ServerCapabilities;
use lspower::lsp::SignatureHelpOptions;
use lspower::lsp::TextDocumentSyncCapability;
@@ -25,6 +28,8 @@ use lspower::lsp::TextDocumentSyncKind;
use lspower::lsp::TextDocumentSyncOptions;
use lspower::lsp::WorkDoneProgressOptions;
use super::semantic_tokens::get_legend;
fn code_action_capabilities(
client_capabilities: &ClientCapabilities,
) -> CodeActionProviderCapability {
@@ -116,7 +121,16 @@ pub fn server_capabilities(
color_provider: None,
execute_command_provider: None,
call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
- semantic_tokens_provider: None,
+ semantic_tokens_provider: Some(
SemanticTokensServerCapabilities::SemanticTokensOptions(
SemanticTokensOptions {
legend: get_legend(),
range: Some(true),
full: Some(SemanticTokensFullOptions::Bool(true)),
..Default::default()
},
),
),
workspace: None,
experimental: None,
linked_editing_range_provider: None,
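
The legend registered here fixes what the raw integers in each emitted token mean: a client resolves token_type n to legend.token_types[n], and each set bit b in token_modifiers_bitset to legend.token_modifiers[b]. A minimal client-side decoding sketch (the string tables mirror get_legend() in cli/lsp/semantic_tokens.rs; describe() itself is illustrative, not part of this commit):

fn describe(token_type: u32, modifiers: u32) -> String {
  // Order must match get_legend() in cli/lsp/semantic_tokens.rs.
  const TYPES: [&str; 12] = [
    "class", "enum", "interface", "namespace", "typeParameter", "type",
    "parameter", "variable", "enumMember", "property", "function", "method",
  ];
  const MODIFIERS: [&str; 6] = [
    "declaration", "static", "async", "readonly", "defaultLibrary", "local",
  ];
  let mut out = TYPES[token_type as usize].to_string();
  for (bit, name) in MODIFIERS.iter().enumerate() {
    if modifiers & (1u32 << bit) != 0 {
      out.push('.');
      out.push_str(name);
    }
  }
  out
}

// describe(8, 0b1001) == "enumMember.declaration.readonly"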


@@ -1961,6 +1961,99 @@ impl Inner {
Ok(Some(selection_ranges))
}
async fn semantic_tokens_full(
&self,
params: SemanticTokensParams,
) -> LspResult<Option<SemanticTokensResult>> {
if !self.enabled() {
return Ok(None);
}
let mark = self.performance.mark("semantic_tokens_full");
let specifier = self.url_map.normalize_url(&params.text_document.uri);
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let req = tsc::RequestMethod::GetEncodedSemanticClassifications((
specifier.clone(),
tsc::TextSpan {
start: 0,
length: line_index.text_content_length_utf16().into(),
},
));
let semantic_classification: tsc::Classifications = self
.ts_server
.request(self.snapshot(), req)
.await
.map_err(|err| {
error!("Failed to request to tsserver {}", err);
LspError::invalid_request()
})?;
let semantic_tokens: SemanticTokens =
semantic_classification.to_semantic_tokens(&line_index);
let response = if !semantic_tokens.data.is_empty() {
Some(SemanticTokensResult::Tokens(semantic_tokens))
} else {
None
};
self.performance.measure(mark);
Ok(response)
}
async fn semantic_tokens_range(
&self,
params: SemanticTokensRangeParams,
) -> LspResult<Option<SemanticTokensRangeResult>> {
if !self.enabled() {
return Ok(None);
}
let mark = self.performance.mark("semantic_tokens_range");
let specifier = self.url_map.normalize_url(&params.text_document.uri);
let line_index =
if let Some(line_index) = self.get_line_index_sync(&specifier) {
line_index
} else {
return Err(LspError::invalid_params(format!(
"An unexpected specifier ({}) was provided.",
specifier
)));
};
let start = line_index.offset_tsc(params.range.start)?;
let length = line_index.offset_tsc(params.range.end)? - start;
let req = tsc::RequestMethod::GetEncodedSemanticClassifications((
specifier.clone(),
tsc::TextSpan { start, length },
));
let semantic_classification: tsc::Classifications = self
.ts_server
.request(self.snapshot(), req)
.await
.map_err(|err| {
error!("Failed to request to tsserver {}", err);
LspError::invalid_request()
})?;
let semantic_tokens: SemanticTokens =
semantic_classification.to_semantic_tokens(&line_index);
let response = if !semantic_tokens.data.is_empty() {
Some(SemanticTokensRangeResult::Tokens(semantic_tokens))
} else {
None
};
self.performance.measure(mark);
Ok(response)
}
async fn signature_help(
&self,
params: SignatureHelpParams,
@@ -2200,6 +2293,20 @@ impl lspower::LanguageServer for LanguageServer {
self.0.lock().await.selection_range(params).await
}
async fn semantic_tokens_full(
&self,
params: SemanticTokensParams,
) -> LspResult<Option<SemanticTokensResult>> {
self.0.lock().await.semantic_tokens_full(params).await
}
async fn semantic_tokens_range(
&self,
params: SemanticTokensRangeParams,
) -> LspResult<Option<SemanticTokensRangeResult>> {
self.0.lock().await.semantic_tokens_range(params).await
}
async fn signature_help(
&self,
params: SignatureHelpParams,
@@ -3539,6 +3646,43 @@ mod tests {
harness.run().await;
}
#[tokio::test]
#[rustfmt::skip]
async fn test_semantic_tokens() {
let mut harness = LspTestHarness::new(vec![
(LspFixture::Path("initialize_request.json"), LspResponse::RequestAny),
(LspFixture::Path("initialized_notification.json"), LspResponse::None),
(
LspFixture::Path("semantic_tokens_did_open_notification.json"),
LspResponse::None,
),
(
LspFixture::Path("semantic_tokens_full_request.json"),
LspResponse::Request(
2,
json!({
"data": [0, 5, 6, 1, 1, 0, 9, 6, 8, 9, 0, 8, 6, 8, 9, 2, 15, 3, 10, 5, 0, 4, 1, 6, 1, 0, 12, 7, 2, 16, 1, 8, 1, 7, 41, 0, 4, 1, 6, 0, 0, 2, 5, 11, 16, 1, 9, 1, 7, 40, 3, 10, 4, 2, 1, 1, 11, 1, 9, 9, 1, 2, 3, 11, 1, 3, 6, 3, 0, 1, 0, 15, 4, 2, 0, 1, 30, 1, 6, 9, 1, 2, 3, 11, 1, 1, 9, 9, 9, 3, 0, 16, 3, 0, 0, 1, 17, 12, 11, 3, 0, 24, 3, 0, 0, 0, 4, 9, 9, 2]
}),
),
),
(
LspFixture::Path("semantic_tokens_range_request.json"),
LspResponse::Request(
4,
json!({
"data": [0, 5, 6, 1, 1, 0, 9, 6, 8, 9, 0, 8, 6, 8, 9, 2, 15, 3, 10, 5, 0, 4, 1, 6, 1, 0, 12, 7, 2, 16, 1, 8, 1, 7, 41, 0, 4, 1, 6, 0, 0, 2, 5, 11, 16, 1, 9, 1, 7, 40]
}),
),
),
(
LspFixture::Path("shutdown_request.json"),
LspResponse::Request(3, json!(null)),
),
(LspFixture::Path("exit_notification.json"), LspResponse::None),
]);
harness.run().await;
}
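
The data arrays asserted in test_semantic_tokens are the raw LSP wire format: one token per group of five integers (delta line, delta start, length, token type, modifier bitset), with positions encoded relative to the previous token. A standalone decoding sketch, not code from this commit:

fn decode(data: &[u32]) -> Vec<(u32, u32, u32, u32, u32)> {
  // Expand delta-encoded tokens into absolute (line, col, length, type, mods).
  let (mut line, mut col) = (0u32, 0u32);
  data
    .chunks_exact(5)
    .map(|t| {
      line += t[0];
      col = if t[0] == 0 { col + t[1] } else { t[1] };
      (line, col, t[2], t[3], t[4])
    })
    .collect()
}

// Against the fixture text below ("enum Values { value1, value2 }" ...):
// [0, 5, 6, 1, 1] is "Values" at line 0, col 5 (type 1 = enum, declaration),
// and [0, 9, 6, 8, 9] is "value1" at col 5 + 9 = 14 (type 8 = enumMember,
// modifiers 9 = declaration | readonly).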
#[tokio::test]
async fn test_code_lens_request() {
let mut harness = LspTestHarness::new(vec![


@@ -13,6 +13,7 @@ pub(crate) mod language_server;
mod path_to_regex;
mod performance;
mod registries;
mod semantic_tokens;
mod sources;
mod text;
mod tsc;

cli/lsp/semantic_tokens.rs (new file, 356 lines)

@@ -0,0 +1,356 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
// The logic of this module is heavily influenced by
// https://github.com/microsoft/vscode/blob/main/extensions/typescript-language-features/src/languageFeatures/semanticTokens.ts
// and https://github.com/microsoft/vscode/blob/main/src/vs/workbench/api/common/extHostTypes.ts
// for the SemanticTokensBuilder implementation.
use lspower::lsp::SemanticToken;
use lspower::lsp::SemanticTokenModifier;
use lspower::lsp::SemanticTokenType;
use lspower::lsp::SemanticTokens;
use lspower::lsp::SemanticTokensLegend;
use std::ops::{Index, IndexMut};
enum TokenType {
Class = 0,
Enum = 1,
Interface = 2,
Namespace = 3,
TypeParameter = 4,
Type = 5,
Parameter = 6,
Variable = 7,
EnumMember = 8,
Property = 9,
Function = 10,
Method = 11,
}
impl<T> Index<TokenType> for Vec<T> {
type Output = T;
fn index(&self, idx: TokenType) -> &T {
&self[idx as usize]
}
}
impl<T> IndexMut<TokenType> for Vec<T> {
fn index_mut(&mut self, idx: TokenType) -> &mut T {
&mut self[idx as usize]
}
}
enum TokenModifier {
Declaration = 0,
Static = 1,
Async = 2,
Readonly = 3,
DefaultLibrary = 4,
Local = 5,
}
impl<T> Index<TokenModifier> for Vec<T> {
type Output = T;
fn index(&self, idx: TokenModifier) -> &T {
&self[idx as usize]
}
}
impl<T> IndexMut<TokenModifier> for Vec<T> {
fn index_mut(&mut self, idx: TokenModifier) -> &mut T {
&mut self[idx as usize]
}
}
pub fn get_legend() -> SemanticTokensLegend {
let mut token_types = vec![SemanticTokenType::from(""); 12];
token_types[TokenType::Class] = "class".into();
token_types[TokenType::Enum] = "enum".into();
token_types[TokenType::Interface] = "interface".into();
token_types[TokenType::Namespace] = "namespace".into();
token_types[TokenType::TypeParameter] = "typeParameter".into();
token_types[TokenType::Type] = "type".into();
token_types[TokenType::Parameter] = "parameter".into();
token_types[TokenType::Variable] = "variable".into();
token_types[TokenType::EnumMember] = "enumMember".into();
token_types[TokenType::Property] = "property".into();
token_types[TokenType::Function] = "function".into();
token_types[TokenType::Method] = "method".into();
let mut token_modifiers = vec![SemanticTokenModifier::from(""); 6];
token_modifiers[TokenModifier::Declaration] = "declaration".into();
token_modifiers[TokenModifier::Static] = "static".into();
token_modifiers[TokenModifier::Async] = "async".into();
token_modifiers[TokenModifier::Readonly] = "readonly".into();
token_modifiers[TokenModifier::DefaultLibrary] = "defaultLibrary".into();
token_modifiers[TokenModifier::Local] = "local".into();
SemanticTokensLegend {
token_types,
token_modifiers,
}
}
pub enum TsTokenEncodingConsts {
TypeOffset = 8,
ModifierMask = 255,
}
pub struct SemanticTokensBuilder {
prev_line: u32,
prev_char: u32,
data_is_sorted_and_delta_encoded: bool,
data: Vec<u32>,
}
impl SemanticTokensBuilder {
pub fn new() -> Self {
Self {
prev_line: 0,
prev_char: 0,
data_is_sorted_and_delta_encoded: true,
data: Vec::new(),
}
}
pub fn push(
&mut self,
line: u32,
char: u32,
length: u32,
token_type: u32,
token_modifiers: u32,
) {
if self.data_is_sorted_and_delta_encoded
&& (line < self.prev_line
|| (line == self.prev_line && char < self.prev_char))
{
// push calls were ordered and are no longer ordered
self.data_is_sorted_and_delta_encoded = false;
// Remove delta encoding from data
let token_count = self.data.len() / 5;
let mut prev_line = 0;
let mut prev_char = 0;
for i in 0..token_count {
let mut line = self.data[5 * i];
let mut char = self.data[5 * i + 1];
if line == 0 {
// on the same line as previous token
line = prev_line;
char += prev_char;
} else {
// on a different line than previous token
line += prev_line;
}
self.data[5 * i] = line;
self.data[5 * i + 1] = char;
prev_line = line;
prev_char = char;
}
}
let mut push_line = line;
let mut push_char = char;
if self.data_is_sorted_and_delta_encoded && !self.data.is_empty() {
push_line -= self.prev_line;
if push_line == 0 {
push_char -= self.prev_char;
}
}
self.data.reserve(5);
self.data.push(push_line);
self.data.push(push_char);
self.data.push(length);
self.data.push(token_type);
self.data.push(token_modifiers);
self.prev_line = line;
self.prev_char = char;
}
fn data_to_semantic_token_vec(
data: &[u32],
data_is_sorted_and_delta_encoded: bool,
) -> Vec<SemanticToken> {
let token_count = data.len() / 5;
let mut result: Vec<SemanticToken> = Vec::with_capacity(token_count);
if data_is_sorted_and_delta_encoded {
for i in 0..token_count {
let src_offset = 5 * i;
result.push(SemanticToken {
delta_line: data[src_offset],
delta_start: data[src_offset + 1],
length: data[src_offset + 2],
token_type: data[src_offset + 3],
token_modifiers_bitset: data[src_offset + 4],
});
}
return result;
}
let mut pos: Vec<usize> = (0..token_count).collect();
pos.sort_by(|a, b| {
let a_line = data[5 * a];
let b_line = data[5 * b];
if a_line == b_line {
let a_char = data[5 * a + 1];
let b_char = data[5 * b + 1];
return a_char.cmp(&b_char);
}
a_line.cmp(&b_line)
});
let mut prev_line = 0;
let mut prev_char = 0;
for i in pos.iter() {
let src_offset = 5 * i;
let line = data[src_offset];
let char = data[src_offset + 1];
let length = data[src_offset + 2];
let token_type = data[src_offset + 3];
let token_modifiers_bitset = data[src_offset + 4];
let delta_line = line - prev_line;
let delta_start = if delta_line == 0 {
char - prev_char
} else {
char
};
result.push(SemanticToken {
delta_line,
delta_start,
length,
token_type,
token_modifiers_bitset,
});
prev_line = line;
prev_char = char;
}
result
}
pub fn build(&self, result_id: Option<String>) -> SemanticTokens {
SemanticTokens {
result_id,
data: SemanticTokensBuilder::data_to_semantic_token_vec(
&self.data,
self.data_is_sorted_and_delta_encoded,
),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_semantic_tokens_builder_simple() {
let mut builder = SemanticTokensBuilder::new();
builder.push(1, 0, 5, 1, 1);
builder.push(1, 10, 4, 2, 2);
builder.push(2, 2, 3, 2, 2);
assert_eq!(
builder.build(None).data,
vec![
SemanticToken {
delta_line: 1,
delta_start: 0,
length: 5,
token_type: 1,
token_modifiers_bitset: 1
},
SemanticToken {
delta_line: 0,
delta_start: 10,
length: 4,
token_type: 2,
token_modifiers_bitset: 2
},
SemanticToken {
delta_line: 1,
delta_start: 2,
length: 3,
token_type: 2,
token_modifiers_bitset: 2
}
]
);
}
#[test]
fn test_semantic_tokens_builder_out_of_order_1() {
let mut builder = SemanticTokensBuilder::new();
builder.push(2, 0, 5, 1, 1);
builder.push(2, 10, 1, 2, 2);
builder.push(2, 15, 2, 3, 3);
builder.push(1, 0, 4, 4, 4);
assert_eq!(
builder.build(None).data,
vec![
SemanticToken {
delta_line: 1,
delta_start: 0,
length: 4,
token_type: 4,
token_modifiers_bitset: 4
},
SemanticToken {
delta_line: 1,
delta_start: 0,
length: 5,
token_type: 1,
token_modifiers_bitset: 1
},
SemanticToken {
delta_line: 0,
delta_start: 10,
length: 1,
token_type: 2,
token_modifiers_bitset: 2
},
SemanticToken {
delta_line: 0,
delta_start: 5,
length: 2,
token_type: 3,
token_modifiers_bitset: 3
}
]
);
}
#[test]
fn test_semantic_tokens_builder_out_of_order_2() {
let mut builder = SemanticTokensBuilder::new();
builder.push(2, 10, 5, 1, 1);
builder.push(2, 2, 4, 2, 2);
assert_eq!(
builder.build(None).data,
vec![
SemanticToken {
delta_line: 2,
delta_start: 2,
length: 4,
token_type: 2,
token_modifiers_bitset: 2
},
SemanticToken {
delta_line: 0,
delta_start: 8,
length: 5,
token_type: 1,
token_modifiers_bitset: 1
}
]
);
}
}
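
As the two out-of-order tests above show, the builder keeps its buffer delta-encoded only while pushes arrive in document order; the first out-of-order push converts the buffer to absolute positions, and build() then sorts and re-encodes. A short usage sketch against this API (token values are illustrative):

let mut builder = SemanticTokensBuilder::new();
// "enum Values": type 1 (enum), modifier bit 0 (declaration).
builder.push(0, 5, 6, TokenType::Enum as u32, 0b1);
// "value1": type 8 (enumMember), declaration | readonly.
builder.push(0, 14, 6, TokenType::EnumMember as u32, 0b1001);
let tokens = builder.build(None);
// The second token is delta-encoded against the first: delta_start = 14 - 5.
assert_eq!(tokens.data[1].delta_start, 9);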


@@ -20,7 +20,7 @@ where
P: FnMut(&T) -> bool,
{
let mut left = 0;
- let mut right = slice.len();
+ let mut right = slice.len() - 1;
while left != right {
let mid = left + (right - left) / 2;
@@ -31,7 +31,7 @@
// In both cases left <= right is satisfied.
// Therefore if left < right in a step,
// left <= right is satisfied in the next step.
- // Therefore as long as left != right, 0 <= left < right <= len is satisfied
+ // Therefore as long as left != right, 0 <= left < right < len is satisfied
// and if this case 0 <= mid < len is satisfied too.
let value = unsafe { slice.get_unchecked(mid) };
if predicate(value) {
@@ -109,6 +109,10 @@ impl LineIndex {
curr_col += c_len;
}
// utf8_offsets and utf16_offsets length is equal to (# of lines + 1)
utf8_offsets.push(curr_row);
utf16_offsets.push(curr_offset_u16);
if !utf16_chars.is_empty() {
utf16_lines.insert(line, utf16_chars);
}
@@ -185,6 +189,10 @@ impl LineIndex {
}
}
pub fn text_content_length_utf16(&self) -> TextSize {
*self.utf16_offsets.last().unwrap()
}
fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
if let Some(utf16_chars) = self.utf16_lines.get(&line) {
for c in utf16_chars {


@@ -6,6 +6,8 @@ use super::analysis::ResolvedDependencyErr;
use super::config;
use super::language_server;
use super::language_server::StateSnapshot;
use super::semantic_tokens::SemanticTokensBuilder;
use super::semantic_tokens::TsTokenEncodingConsts;
use super::text;
use super::text::LineIndex;
@@ -886,6 +888,56 @@ impl FileTextChanges {
}
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Classifications {
spans: Vec<u32>,
}
impl Classifications {
pub fn to_semantic_tokens(
&self,
line_index: &LineIndex,
) -> lsp::SemanticTokens {
let token_count = self.spans.len() / 3;
let mut builder = SemanticTokensBuilder::new();
for i in 0..token_count {
let src_offset = 3 * i;
let offset = self.spans[src_offset];
let length = self.spans[src_offset + 1];
let ts_classification = self.spans[src_offset + 2];
let token_type =
Classifications::get_token_type_from_classification(ts_classification);
let token_modifiers =
Classifications::get_token_modifier_from_classification(
ts_classification,
);
let start_pos = line_index.position_tsc(offset.into());
let end_pos = line_index.position_tsc(TextSize::from(offset + length));
// start_pos.line == end_pos.line is always true as there are no multiline tokens
builder.push(
start_pos.line,
start_pos.character,
end_pos.character - start_pos.character,
token_type,
token_modifiers,
);
}
builder.build(None)
}
fn get_token_type_from_classification(ts_classification: u32) -> u32 {
(ts_classification >> (TsTokenEncodingConsts::TypeOffset as u32)) - 1
}
fn get_token_modifier_from_classification(ts_classification: u32) -> u32 {
ts_classification & (TsTokenEncodingConsts::ModifierMask as u32)
}
}
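
The two helpers above undo TypeScript's "2020" classification encoding, in which each span's third value packs the token type and modifier set as ((type + 1) << 8) | modifiers; the +1 presumably keeps 0 free to mean "unclassified". A worked example under that assumption:

// An enumMember (type 8) declaration that is also readonly (modifiers 9):
let ts_classification: u32 = ((8 + 1) << 8) | 9; // 2313
assert_eq!(
  (ts_classification >> (TsTokenEncodingConsts::TypeOffset as u32)) - 1,
  8 // TokenType::EnumMember
);
assert_eq!(
  ts_classification & (TsTokenEncodingConsts::ModifierMask as u32),
  9 // declaration (1) | readonly (8)
);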
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CodeAction {
@@ -2150,6 +2202,8 @@ pub enum RequestMethod {
GetDiagnostics(Vec<ModuleSpecifier>),
/// Return document highlights at position.
GetDocumentHighlights((ModuleSpecifier, u32, Vec<ModuleSpecifier>)),
/// Get semantic highlights information for a particular file.
GetEncodedSemanticClassifications((ModuleSpecifier, TextSpan)),
/// Get implementation information for a specific position.
GetImplementation((ModuleSpecifier, u32)),
/// Get a "navigation tree" for a specifier.
@@ -2259,6 +2313,14 @@ impl RequestMethod {
"position": position,
"filesToSearch": files_to_search,
}),
RequestMethod::GetEncodedSemanticClassifications((specifier, span)) => {
json!({
"id": id,
"method": "getEncodedSemanticClassifications",
"specifier": specifier,
"span": span,
})
}
RequestMethod::GetImplementation((specifier, position)) => json!({
"id": id,
"method": "getImplementation",


@@ -0,0 +1,12 @@
{
"jsonrpc": "2.0",
"method": "textDocument/didOpen",
"params": {
"textDocument": {
"uri": "file:///a/file.ts",
"languageId": "typescript",
"version": 1,
"text": "enum Values { value1, value2 }\n\nasync function baz(s: string): Promise<string> {\n const r = s.slice(0);\n return r;\n}\n\ninterface IFoo {\n readonly x: number;\n foo(): boolean;\n}\n\nclass Bar implements IFoo {\n constructor(public readonly x: number) { }\n foo() { return true; }\n static staticBar = new Bar(0);\n private static getStaticBar() { return Bar.staticBar; }\n}\n"
}
}
}


@@ -0,0 +1,10 @@
{
"jsonrpc": "2.0",
"id": 2,
"method": "textDocument/semanticTokens/full",
"params": {
"textDocument": {
"uri": "file:///a/file.ts"
}
}
}


@@ -0,0 +1,20 @@
{
"jsonrpc": "2.0",
"id": 4,
"method": "textDocument/semanticTokens/range",
"params": {
"textDocument": {
"uri": "file:///a/file.ts"
},
"range": {
"start": {
"line": 0,
"character": 0
},
"end": {
"line": 6,
"character": 0
}
}
}
}
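
semantic_tokens_range converts this range into the flat (start, length) UTF-16 span that tsc expects, via LineIndex::offset_tsc. A rough standalone equivalent of that position-to-offset step (a hypothetical helper, not the LineIndex from this commit):

/// Flat UTF-16 offset of an LSP (line, character) position in `text`.
/// Sketch only; assumes the position is valid for `text`.
fn utf16_offset(text: &str, line: u32, character: u32) -> u32 {
  let mut offset = 0u32;
  for (i, l) in text.split_inclusive('\n').enumerate() {
    if (i as u32) == line {
      return offset + character;
    }
    offset += l.encode_utf16().count() as u32;
  }
  offset + character
}

// For this fixture: start = utf16_offset(text, 0, 0) = 0 and
// length = utf16_offset(text, 6, 0) - start, which becomes the
// tsc::TextSpan handed to GetEncodedSemanticClassifications.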


@@ -660,6 +660,16 @@ delete Object.prototype.__proto__;
),
);
}
case "getEncodedSemanticClassifications": {
return respond(
id,
languageService.getEncodedSemanticClassifications(
request.specifier,
request.span,
ts.SemanticClassificationFormat.TwentyTwenty,
),
);
}
case "getImplementation": {
return respond(
id,


@@ -56,6 +56,7 @@ declare global {
| GetDefinitionRequest
| GetDiagnosticsRequest
| GetDocumentHighlightsRequest
| GetEncodedSemanticClassifications
| GetImplementationRequest
| GetNavigationTree
| GetOutliningSpans
@@ -144,6 +145,13 @@ declare global {
filesToSearch: string[];
}
interface GetEncodedSemanticClassifications
extends BaseLanguageServerRequest {
method: "getEncodedSemanticClassifications";
specifier: string;
span: ts.TextSpan;
}
interface GetImplementationRequest extends BaseLanguageServerRequest {
method: "getImplementation";
specifier: string;