mirror of
https://github.com/dart-lang/sdk
synced 2024-10-08 14:44:10 +00:00
[Analyzer] Add LSP support for textDocument/semanticTokens/full
Fixes https://github.com/Dart-Code/Dart-Code/issues/2202 + many more. Change-Id: Ia2e8ec2415836a77618821144699971e97b4fc5c Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/175722 Reviewed-by: Brian Wilkerson <brianwilkerson@google.com>
This commit is contained in:
parent
3cb0f09360
commit
cb2ede57b7
|
@ -38,7 +38,11 @@ class DartUnitHighlightsComputer {
|
|||
}
|
||||
}
|
||||
if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) {
|
||||
highlightType = HighlightRegionType.COMMENT_END_OF_LINE;
|
||||
if (commentToken.lexeme.startsWith('///')) {
|
||||
highlightType = HighlightRegionType.COMMENT_DOCUMENTATION;
|
||||
} else {
|
||||
highlightType = HighlightRegionType.COMMENT_END_OF_LINE;
|
||||
}
|
||||
}
|
||||
if (highlightType != null) {
|
||||
_addRegion_token(commentToken, highlightType);
|
||||
|
|
|
@ -88,8 +88,20 @@ abstract class CustomMethods {
|
|||
static const super_ = Method('dart/textDocument/super');
|
||||
|
||||
// TODO(dantup): Remove custom AnalyzerStatus status method soon as no clients
|
||||
// should be relying on it and we now support proper $/progress events.
|
||||
// should be relying on it as we now support proper $/progress events.
|
||||
static const analyzerStatus = Method(r'$/analyzerStatus');
|
||||
|
||||
/// Semantic tokens are dynamically registered using a single string
|
||||
/// "textDocument/semanticTokens" instead of for each individual method
|
||||
/// (full, range, full/delta) so the built-in Method class does not contain
|
||||
/// the required constant.
|
||||
static const semanticTokenDynamicRegistration =
|
||||
Method('textDocument/semanticTokens');
|
||||
}
|
||||
|
||||
abstract class CustomSemanticTokenTypes {
|
||||
static const annotation = SemanticTokenTypes('annotation');
|
||||
static const boolean = SemanticTokenTypes('boolean');
|
||||
}
|
||||
|
||||
/// CodeActionKinds supported by the server that are not declared in the LSP spec.
|
||||
|
|
|
@ -0,0 +1,84 @@
|
|||
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
||||
import 'package:analysis_server/lsp_protocol/protocol_special.dart';
|
||||
import 'package:analysis_server/src/computer/computer_highlights.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handlers.dart';
|
||||
import 'package:analysis_server/src/lsp/lsp_analysis_server.dart';
|
||||
import 'package:analysis_server/src/lsp/mapping.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/encoder.dart';
|
||||
import 'package:analysis_server/src/plugin/result_merger.dart';
|
||||
import 'package:analyzer/dart/analysis/results.dart';
|
||||
import 'package:analyzer_plugin/protocol/protocol_common.dart';
|
||||
|
||||
class SemanticTokensHandler
|
||||
extends MessageHandler<SemanticTokensParams, SemanticTokens>
|
||||
with LspPluginRequestHandlerMixin {
|
||||
SemanticTokensHandler(LspAnalysisServer server) : super(server);
|
||||
|
||||
@override
|
||||
Method get handlesMessage => Method.textDocument_semanticTokens_full;
|
||||
|
||||
@override
|
||||
LspJsonHandler<SemanticTokensParams> get jsonHandler =>
|
||||
SemanticTokensParams.jsonHandler;
|
||||
|
||||
List<List<HighlightRegion>> getPluginResults(String path) {
|
||||
final notificationManager = server.notificationManager;
|
||||
return notificationManager.highlights.getResults(path);
|
||||
}
|
||||
|
||||
Future<List<HighlightRegion>> getServerResult(String path) async {
|
||||
final result = await server.getResolvedUnit(path);
|
||||
if (result?.state == ResultState.VALID) {
|
||||
final computer = DartUnitHighlightsComputer(result.unit);
|
||||
return computer.compute();
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
@override
|
||||
Future<ErrorOr<SemanticTokens>> handle(
|
||||
SemanticTokensParams params, CancellationToken token) async {
|
||||
final path = pathOfDoc(params.textDocument);
|
||||
|
||||
return path.mapResult((path) async {
|
||||
final lineInfo = server.getLineInfo(path);
|
||||
// If there is no lineInfo, the request cannot be translated from LSP
|
||||
// line/col to server offset/length.
|
||||
if (lineInfo == null) {
|
||||
return success(null);
|
||||
}
|
||||
|
||||
// We need to be able to split multiline tokens up if a client does not
|
||||
// support them. Doing this correctly requires access to the line endings
|
||||
// and indenting so we must get a copy of the file contents. Although this
|
||||
// is on the Dart unit result, we may also need this for files being
|
||||
// handled by plugins.
|
||||
final file = server.resourceProvider.getFile(path);
|
||||
if (!file.exists) {
|
||||
return success(null);
|
||||
}
|
||||
final fileContents = file.readAsStringSync();
|
||||
|
||||
final allResults = [
|
||||
await getServerResult(path),
|
||||
...getPluginResults(path),
|
||||
];
|
||||
|
||||
final merger = ResultMerger();
|
||||
final mergedResults = merger.mergeHighlightRegions(allResults);
|
||||
|
||||
final encoder = SemanticTokenEncoder();
|
||||
final tokens =
|
||||
encoder.convertHighlights(mergedResults, lineInfo, fileContents);
|
||||
final semanticTokens = encoder.encodeTokens(tokens);
|
||||
|
||||
return success(semanticTokens);
|
||||
});
|
||||
}
|
||||
}
|
|
@ -29,6 +29,7 @@ import 'package:analysis_server/src/lsp/handlers/handler_initialize.dart';
|
|||
import 'package:analysis_server/src/lsp/handlers/handler_initialized.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_references.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_rename.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_semantic_tokens.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_shutdown.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_signature_help.dart';
|
||||
import 'package:analysis_server/src/lsp/handlers/handler_text_document_changes.dart';
|
||||
|
@ -102,6 +103,7 @@ class InitializedStateMessageHandler extends ServerStateMessageHandler {
|
|||
registerHandler(WorkspaceDidChangeConfigurationMessageHandler(server));
|
||||
registerHandler(ReanalyzeHandler(server));
|
||||
registerHandler(WillRenameFilesHandler(server));
|
||||
registerHandler(SemanticTokensHandler(server));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
195
pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart
Normal file
195
pkg/analysis_server/lib/src/lsp/semantic_tokens/encoder.dart
Normal file
|
@ -0,0 +1,195 @@
|
|||
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
import 'dart:collection';
|
||||
import 'dart:math' as math;
|
||||
|
||||
import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/mapping.dart';
|
||||
import 'package:analyzer/source/line_info.dart';
|
||||
import 'package:analyzer_plugin/protocol/protocol_common.dart';
|
||||
|
||||
/// Collects information about Semantic Tokens using absolute line/columns and
|
||||
/// token types/modifiers and encodes them into a [List<int>] in a
|
||||
/// [SemanticTokens] (a [List<int>]) as described by the LSP spec .
|
||||
class SemanticTokenEncoder {
|
||||
/// Converts [regions]s into LSP [SemanticTokenInfo], splitting multiline tokens
|
||||
/// and nested tokens if required.
|
||||
List<SemanticTokenInfo> convertHighlights(
|
||||
List<HighlightRegion> regions, LineInfo lineInfo, String fileContent) {
|
||||
// LSP is zero-based but server is 1-based.
|
||||
const lspPositionOffset = -1;
|
||||
|
||||
final tokens = <SemanticTokenInfo>[];
|
||||
|
||||
// Capabilities exist for supporting multiline/overlapping tokens. These
|
||||
// could be used if any clients take it up (VS Code does not).
|
||||
// - clientCapabilities?.multilineTokenSupport
|
||||
// - clientCapabilities?.overlappingTokenSupport
|
||||
final allowMultilineTokens = false;
|
||||
final allowOverlappingTokens = false;
|
||||
|
||||
Iterable<HighlightRegion> translatedRegions = regions;
|
||||
|
||||
if (!allowMultilineTokens) {
|
||||
translatedRegions = translatedRegions.expand(
|
||||
(region) => _splitMultilineRegions(region, lineInfo, fileContent));
|
||||
}
|
||||
if (!allowOverlappingTokens) {
|
||||
translatedRegions = _splitOverlappingTokens(translatedRegions);
|
||||
}
|
||||
|
||||
for (final region in translatedRegions) {
|
||||
final tokenType = highlightRegionTokenTypes[region.type];
|
||||
if (tokenType == null) {
|
||||
// Skip over tokens we don't have mappings for.
|
||||
continue;
|
||||
}
|
||||
|
||||
final start = lineInfo.getLocation(region.offset);
|
||||
|
||||
tokens.add(SemanticTokenInfo(
|
||||
start.lineNumber + lspPositionOffset,
|
||||
start.columnNumber + lspPositionOffset,
|
||||
region.length,
|
||||
tokenType,
|
||||
highlightRegionTokenModifiers[region.type],
|
||||
));
|
||||
}
|
||||
|
||||
return tokens;
|
||||
}
|
||||
|
||||
SemanticTokens encodeTokens(List<SemanticTokenInfo> tokens) {
|
||||
final encodedTokens = <int>[];
|
||||
var lastLine = 0;
|
||||
var lastColumn = 0;
|
||||
|
||||
// Ensure tokens are all sorted by location in file regardless of the order
|
||||
// they were registered.
|
||||
tokens.sort(SemanticTokenInfo.offsetSort);
|
||||
|
||||
for (final token in tokens) {
|
||||
var relativeLine = token.line - lastLine;
|
||||
// Column is relative to last only if on the same line.
|
||||
var relativeColumn =
|
||||
relativeLine == 0 ? token.column - lastColumn : token.column;
|
||||
|
||||
// The resulting array is groups of 5 items as described in the LSP spec:
|
||||
// https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_semanticTokens
|
||||
encodedTokens.addAll([
|
||||
relativeLine,
|
||||
relativeColumn,
|
||||
token.length,
|
||||
semanticTokenLegend.indexForType(token.type),
|
||||
semanticTokenLegend.bitmaskForModifiers(token.modifiers) ?? 0
|
||||
]);
|
||||
|
||||
lastLine = token.line;
|
||||
lastColumn = token.column;
|
||||
}
|
||||
|
||||
return SemanticTokens(data: encodedTokens);
|
||||
}
|
||||
|
||||
/// Splits multiline regions into multiple regions for clients that do not support
|
||||
/// multiline tokens.
|
||||
Iterable<HighlightRegion> _splitMultilineRegions(
|
||||
HighlightRegion region, LineInfo lineInfo, String fileContent) sync* {
|
||||
final start = lineInfo.getLocation(region.offset);
|
||||
final end = lineInfo.getLocation(region.offset + region.length);
|
||||
|
||||
// Create a region for each line in the original region.
|
||||
for (var lineNumber = start.lineNumber;
|
||||
lineNumber <= end.lineNumber;
|
||||
lineNumber++) {
|
||||
final isFirstLine = lineNumber == start.lineNumber;
|
||||
final isLastLine = lineNumber == end.lineNumber;
|
||||
final isSingleLine = start.lineNumber == end.lineNumber;
|
||||
final lineOffset = lineInfo.getOffsetOfLine(lineNumber - 1);
|
||||
|
||||
var startOffset = isFirstLine ? start.columnNumber - 1 : 0;
|
||||
var endOffset = isLastLine
|
||||
? end.columnNumber - 1
|
||||
: lineInfo.getOffsetOfLine(lineNumber) - lineOffset;
|
||||
var length = endOffset - startOffset;
|
||||
|
||||
// When we split multiline tokens, we may end up with leading/trailing
|
||||
// whitespace which doesn't make sense to include in the token. Examine
|
||||
// the content to remove this.
|
||||
if (!isSingleLine) {
|
||||
final tokenContent = fileContent.substring(
|
||||
lineOffset + startOffset, lineOffset + endOffset);
|
||||
final leadingWhitespaceCount =
|
||||
tokenContent.length - tokenContent.trimLeft().length;
|
||||
final trailingWhitespaceCount =
|
||||
tokenContent.length - tokenContent.trimRight().length;
|
||||
|
||||
startOffset += leadingWhitespaceCount;
|
||||
endOffset -= trailingWhitespaceCount;
|
||||
length = endOffset - startOffset;
|
||||
}
|
||||
|
||||
yield HighlightRegion(region.type, lineOffset + startOffset, length);
|
||||
}
|
||||
}
|
||||
|
||||
Iterable<HighlightRegion> _splitOverlappingTokens(
|
||||
Iterable<HighlightRegion> regions) sync* {
|
||||
if (regions.isEmpty) {
|
||||
return;
|
||||
}
|
||||
|
||||
final sortedRegions = regions.toList()
|
||||
..sort((r1, r2) => r1.offset.compareTo(r2.offset));
|
||||
|
||||
final firstRegion = sortedRegions.first;
|
||||
final stack = ListQueue<HighlightRegion>()..add(firstRegion);
|
||||
var pos = firstRegion.offset;
|
||||
|
||||
for (final current in sortedRegions.skip(1)) {
|
||||
if (stack.last != null) {
|
||||
final last = stack.last;
|
||||
final newPos = current.offset;
|
||||
if (newPos - pos > 0) {
|
||||
// The previous region ends at either its original end or
|
||||
// the position of this next region, whichever is shorter.
|
||||
final end = math.min(last.offset + last.length, newPos);
|
||||
final length = end - pos;
|
||||
yield HighlightRegion(last.type, pos, length);
|
||||
pos = newPos;
|
||||
}
|
||||
}
|
||||
|
||||
stack.addLast(current);
|
||||
}
|
||||
|
||||
// Process any remaining stack after the last region.
|
||||
while (stack.isNotEmpty) {
|
||||
final last = stack.removeLast();
|
||||
final newPos = last.offset + last.length;
|
||||
final length = newPos - pos;
|
||||
if (length > 0) {
|
||||
yield HighlightRegion(last.type, pos, length);
|
||||
pos = newPos;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class SemanticTokenInfo {
|
||||
final int line;
|
||||
final int column;
|
||||
final int length;
|
||||
final SemanticTokenTypes type;
|
||||
final Set<SemanticTokenModifiers> modifiers;
|
||||
|
||||
SemanticTokenInfo(
|
||||
this.line, this.column, this.length, this.type, this.modifiers);
|
||||
|
||||
static int offsetSort(t1, t2) => t1.line == t2.line
|
||||
? t1.column.compareTo(t2.column)
|
||||
: t1.line.compareTo(t2.line);
|
||||
}
|
78
pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart
Normal file
78
pkg/analysis_server/lib/src/lsp/semantic_tokens/legend.dart
Normal file
|
@ -0,0 +1,78 @@
|
|||
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
import 'dart:math' as math;
|
||||
|
||||
import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/mapping.dart';
|
||||
import 'package:meta/meta.dart';
|
||||
|
||||
final semanticTokenLegend = SemanticTokenLegendLookup();
|
||||
|
||||
/// A helper for looking up indexes and bitmasks of [SemanticTokenTypes] and
|
||||
/// [SemanticTokenModifiers].
|
||||
class SemanticTokenLegendLookup {
|
||||
/// An LSP [SemanticTokensLegend] describing all supported tokens and modifiers.
|
||||
SemanticTokensLegend lspLegend;
|
||||
|
||||
/// All [SemanticTokenModifiers] the server may generate. The order of these
|
||||
/// items is important as the indexes will be used in communication between
|
||||
/// server and client.
|
||||
List<SemanticTokenModifiers> _usedTokenModifiers;
|
||||
|
||||
/// All [SemanticTokenTypes] the server may generate. The order of these
|
||||
/// items is important as the indexes will be used in communication betewen
|
||||
/// server and client.
|
||||
List<SemanticTokenTypes> _usedTokenTypes;
|
||||
|
||||
SemanticTokenLegendLookup() {
|
||||
// Build lists of all tokens and modifiers that exist in our mappings. These will
|
||||
// be used to determine the indexes used for communication.
|
||||
_usedTokenTypes = Set.of(highlightRegionTokenTypes.values).toList();
|
||||
_usedTokenModifiers =
|
||||
Set.of(highlightRegionTokenModifiers.values.expand((v) => v)).toList();
|
||||
|
||||
// Build the LSP Legend which tells the client all of the tokens and modifiers
|
||||
// we will use in the order they should be accessed by index/bit.
|
||||
lspLegend = SemanticTokensLegend(
|
||||
tokenTypes:
|
||||
_usedTokenTypes.map((tokenType) => tokenType.toString()).toList(),
|
||||
tokenModifiers: _usedTokenModifiers
|
||||
.map((tokenModifier) => tokenModifier.toString())
|
||||
.toList(),
|
||||
);
|
||||
}
|
||||
|
||||
int bitmaskForModifiers(Set<SemanticTokenModifiers> modifiers) {
|
||||
// Modifiers use a bit mask where each bit represents the index of a modifier.
|
||||
// 001001 would indicate the 1st and 4th modifiers are applied.
|
||||
return modifiers
|
||||
?.map(_usedTokenModifiers.indexOf)
|
||||
?.map((index) => math.pow(2, index))
|
||||
?.reduce((a, b) => a + b) ??
|
||||
0;
|
||||
}
|
||||
|
||||
int indexForType(SemanticTokenTypes type) {
|
||||
return _usedTokenTypes.indexOf(type);
|
||||
}
|
||||
|
||||
/// Gets the [SemanticTokenModifiers] for a given index.
|
||||
@visibleForTesting
|
||||
List<SemanticTokenModifiers> modifiersForBitmask(int mask) {
|
||||
final modifiers = <SemanticTokenModifiers>[];
|
||||
for (var i = 0; i < _usedTokenModifiers.length; i++) {
|
||||
// Check if the i'th bit is set
|
||||
final modifierBit = 1 << i;
|
||||
if (mask & modifierBit != 0) {
|
||||
modifiers.add(_usedTokenModifiers[i]);
|
||||
}
|
||||
}
|
||||
return modifiers;
|
||||
}
|
||||
|
||||
/// Gets the [SemanticTokenTypes] for a given index.
|
||||
@visibleForTesting
|
||||
SemanticTokenTypes typeForIndex(int index) => _usedTokenTypes[index];
|
||||
}
|
173
pkg/analysis_server/lib/src/lsp/semantic_tokens/mapping.dart
Normal file
173
pkg/analysis_server/lib/src/lsp/semantic_tokens/mapping.dart
Normal file
|
@ -0,0 +1,173 @@
|
|||
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
||||
import 'package:analysis_server/src/lsp/constants.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
|
||||
import 'package:analyzer_plugin/protocol/protocol_common.dart';
|
||||
|
||||
final highlightRegionMapper = RegionTypeMapper();
|
||||
|
||||
/// A mapping from [HighlightRegionType] to a set of [SemanticTokenModifiers].
|
||||
final highlightRegionTokenModifiers =
|
||||
<HighlightRegionType, Set<SemanticTokenModifiers>>{
|
||||
HighlightRegionType.COMMENT_DOCUMENTATION: {
|
||||
SemanticTokenModifiers.documentation
|
||||
},
|
||||
HighlightRegionType.DYNAMIC_LOCAL_VARIABLE_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.DYNAMIC_PARAMETER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.INSTANCE_FIELD_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.INSTANCE_GETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.INSTANCE_METHOD_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.INSTANCE_SETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.LOCAL_FUNCTION_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.LOCAL_VARIABLE_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.PARAMETER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.STATIC_FIELD_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration,
|
||||
SemanticTokenModifiers.static,
|
||||
},
|
||||
HighlightRegionType.STATIC_GETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration,
|
||||
SemanticTokenModifiers.static,
|
||||
},
|
||||
HighlightRegionType.STATIC_GETTER_REFERENCE: {SemanticTokenModifiers.static},
|
||||
HighlightRegionType.STATIC_METHOD_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration,
|
||||
SemanticTokenModifiers.static,
|
||||
},
|
||||
HighlightRegionType.STATIC_METHOD_REFERENCE: {SemanticTokenModifiers.static},
|
||||
HighlightRegionType.STATIC_SETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration,
|
||||
SemanticTokenModifiers.static,
|
||||
},
|
||||
HighlightRegionType.STATIC_SETTER_REFERENCE: {SemanticTokenModifiers.static},
|
||||
HighlightRegionType.TOP_LEVEL_FUNCTION_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration,
|
||||
SemanticTokenModifiers.static,
|
||||
},
|
||||
HighlightRegionType.TOP_LEVEL_GETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.TOP_LEVEL_SETTER_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
HighlightRegionType.TOP_LEVEL_VARIABLE_DECLARATION: {
|
||||
SemanticTokenModifiers.declaration
|
||||
},
|
||||
};
|
||||
|
||||
/// A mapping from [HighlightRegionType] to [SemanticTokenTypes].
|
||||
final highlightRegionTokenTypes = {
|
||||
HighlightRegionType.ANNOTATION: CustomSemanticTokenTypes.annotation,
|
||||
HighlightRegionType.BUILT_IN: SemanticTokenTypes.keyword,
|
||||
HighlightRegionType.CLASS: SemanticTokenTypes.class_,
|
||||
HighlightRegionType.COMMENT_BLOCK: SemanticTokenTypes.comment,
|
||||
HighlightRegionType.COMMENT_DOCUMENTATION: SemanticTokenTypes.comment,
|
||||
HighlightRegionType.COMMENT_END_OF_LINE: SemanticTokenTypes.comment,
|
||||
HighlightRegionType.CONSTRUCTOR: SemanticTokenTypes.class_,
|
||||
HighlightRegionType.DYNAMIC_LOCAL_VARIABLE_DECLARATION:
|
||||
SemanticTokenTypes.variable,
|
||||
HighlightRegionType.DYNAMIC_LOCAL_VARIABLE_REFERENCE:
|
||||
SemanticTokenTypes.variable,
|
||||
HighlightRegionType.DYNAMIC_PARAMETER_DECLARATION:
|
||||
SemanticTokenTypes.parameter,
|
||||
HighlightRegionType.DYNAMIC_PARAMETER_REFERENCE: SemanticTokenTypes.parameter,
|
||||
HighlightRegionType.ENUM: SemanticTokenTypes.enum_,
|
||||
HighlightRegionType.ENUM_CONSTANT: SemanticTokenTypes.enumMember,
|
||||
HighlightRegionType.FUNCTION_TYPE_ALIAS: SemanticTokenTypes.type,
|
||||
HighlightRegionType.INSTANCE_FIELD_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.INSTANCE_FIELD_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.INSTANCE_GETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.INSTANCE_GETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.INSTANCE_METHOD_DECLARATION: SemanticTokenTypes.method,
|
||||
HighlightRegionType.INSTANCE_METHOD_REFERENCE: SemanticTokenTypes.method,
|
||||
HighlightRegionType.INSTANCE_SETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.INSTANCE_SETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.KEYWORD: SemanticTokenTypes.keyword,
|
||||
HighlightRegionType.LIBRARY_NAME: SemanticTokenTypes.namespace,
|
||||
HighlightRegionType.LITERAL_BOOLEAN: CustomSemanticTokenTypes.boolean,
|
||||
HighlightRegionType.LITERAL_DOUBLE: SemanticTokenTypes.number,
|
||||
HighlightRegionType.LITERAL_INTEGER: SemanticTokenTypes.number,
|
||||
HighlightRegionType.LITERAL_STRING: SemanticTokenTypes.string,
|
||||
HighlightRegionType.LOCAL_FUNCTION_DECLARATION: SemanticTokenTypes.function,
|
||||
HighlightRegionType.LOCAL_FUNCTION_REFERENCE: SemanticTokenTypes.function,
|
||||
HighlightRegionType.LOCAL_VARIABLE_DECLARATION: SemanticTokenTypes.variable,
|
||||
HighlightRegionType.LOCAL_VARIABLE_REFERENCE: SemanticTokenTypes.variable,
|
||||
HighlightRegionType.PARAMETER_DECLARATION: SemanticTokenTypes.parameter,
|
||||
HighlightRegionType.PARAMETER_REFERENCE: SemanticTokenTypes.parameter,
|
||||
HighlightRegionType.STATIC_FIELD_DECLARATION: SemanticTokenTypes.variable,
|
||||
HighlightRegionType.STATIC_GETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.STATIC_GETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.STATIC_METHOD_DECLARATION: SemanticTokenTypes.method,
|
||||
HighlightRegionType.STATIC_METHOD_REFERENCE: SemanticTokenTypes.method,
|
||||
HighlightRegionType.STATIC_SETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.STATIC_SETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.TOP_LEVEL_FUNCTION_DECLARATION:
|
||||
SemanticTokenTypes.function,
|
||||
HighlightRegionType.TOP_LEVEL_FUNCTION_REFERENCE: SemanticTokenTypes.function,
|
||||
HighlightRegionType.TOP_LEVEL_GETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.TOP_LEVEL_GETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.TOP_LEVEL_SETTER_DECLARATION: SemanticTokenTypes.property,
|
||||
HighlightRegionType.TOP_LEVEL_SETTER_REFERENCE: SemanticTokenTypes.property,
|
||||
HighlightRegionType.TOP_LEVEL_VARIABLE: SemanticTokenTypes.variable,
|
||||
HighlightRegionType.TOP_LEVEL_VARIABLE_DECLARATION:
|
||||
SemanticTokenTypes.variable,
|
||||
HighlightRegionType.TYPE_NAME_DYNAMIC: SemanticTokenTypes.type,
|
||||
HighlightRegionType.TYPE_PARAMETER: SemanticTokenTypes.typeParameter,
|
||||
HighlightRegionType.UNRESOLVED_INSTANCE_MEMBER_REFERENCE:
|
||||
SemanticTokenTypes.variable,
|
||||
};
|
||||
|
||||
/// A helper for converting from Server highlight regions to LSP semantic tokens.
|
||||
class RegionTypeMapper {
|
||||
/// A map to get the [SemanticTokenTypes] index directly from a [HighlightRegionType].
|
||||
final Map<HighlightRegionType, int> _tokenTypeIndexForHighlightRegion = {};
|
||||
|
||||
/// A map to get the [SemanticTokenModifiers] bitmask directly from a [HighlightRegionType].
|
||||
final Map<HighlightRegionType, int> _tokenModifierBitmaskForHighlightRegion =
|
||||
{};
|
||||
|
||||
RegionTypeMapper() {
|
||||
// Build mappings that go directly from [HighlightRegionType] to index/bitmask
|
||||
// for faster lookups.
|
||||
for (final regionType in highlightRegionTokenTypes.keys) {
|
||||
_tokenTypeIndexForHighlightRegion[regionType] = semanticTokenLegend
|
||||
.indexForType(highlightRegionTokenTypes[regionType]);
|
||||
}
|
||||
|
||||
for (final regionType in highlightRegionTokenTypes.keys) {
|
||||
_tokenModifierBitmaskForHighlightRegion[regionType] = semanticTokenLegend
|
||||
.bitmaskForModifiers(highlightRegionTokenModifiers[regionType]);
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the [SemanticTokenModifiers] bitmask for a [HighlightRegionType]. Returns
|
||||
/// null if the region type has not been mapped.
|
||||
int bitmaskForModifier(HighlightRegionType type) =>
|
||||
_tokenModifierBitmaskForHighlightRegion[type];
|
||||
|
||||
/// Gets the [SemanticTokenTypes] index for a [HighlightRegionType]. Returns
|
||||
/// null if the region type has not been mapped.
|
||||
int indexForToken(HighlightRegionType type) =>
|
||||
_tokenTypeIndexForHighlightRegion[type];
|
||||
}
|
|
@ -6,6 +6,7 @@ import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
|||
import 'package:analysis_server/lsp_protocol/protocol_special.dart';
|
||||
import 'package:analysis_server/src/lsp/constants.dart';
|
||||
import 'package:analysis_server/src/lsp/lsp_analysis_server.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
|
||||
|
||||
/// Helper for reading client dynamic registrations which may be ommitted by the
|
||||
/// client.
|
||||
|
@ -38,6 +39,9 @@ class ClientDynamicRegistrations {
|
|||
// workspace.fileOperations covers all file operation methods but we only
|
||||
// support this one.
|
||||
Method.workspace_willRenameFiles,
|
||||
// Sematic tokens are all registered under a single "method" as the
|
||||
// actual methods are controlled by the server capabilities.
|
||||
CustomMethods.semanticTokenDynamicRegistration,
|
||||
];
|
||||
final ClientCapabilities _capabilities;
|
||||
|
||||
|
@ -87,6 +91,9 @@ class ClientDynamicRegistrations {
|
|||
bool get rename =>
|
||||
_capabilities.textDocument?.rename?.dynamicRegistration ?? false;
|
||||
|
||||
bool get semanticTokens =>
|
||||
_capabilities.textDocument?.semanticTokens?.dynamicRegistration ?? false;
|
||||
|
||||
bool get signatureHelp =>
|
||||
_capabilities.textDocument?.signatureHelp?.dynamicRegistration ?? false;
|
||||
|
||||
|
@ -226,6 +233,17 @@ class ServerCapabilitiesComputer {
|
|||
FoldingRangeRegistrationOptions>.t1(
|
||||
true,
|
||||
),
|
||||
semanticTokensProvider: dynamicRegistrations.semanticTokens
|
||||
? null
|
||||
: Either2<SemanticTokensOptions,
|
||||
SemanticTokensRegistrationOptions>.t1(
|
||||
SemanticTokensOptions(
|
||||
legend: semanticTokenLegend.lspLegend,
|
||||
full: Either2<bool, SemanticTokensOptionsFull>.t2(
|
||||
SemanticTokensOptionsFull(delta: false),
|
||||
),
|
||||
),
|
||||
),
|
||||
executeCommandProvider: ExecuteCommandOptions(
|
||||
commands: Commands.serverSupportedCommands,
|
||||
workDoneProgress: true,
|
||||
|
@ -427,6 +445,17 @@ class ServerCapabilitiesComputer {
|
|||
dynamicRegistrations.didChangeConfiguration,
|
||||
Method.workspace_didChangeConfiguration,
|
||||
);
|
||||
register(
|
||||
dynamicRegistrations.semanticTokens,
|
||||
CustomMethods.semanticTokenDynamicRegistration,
|
||||
SemanticTokensRegistrationOptions(
|
||||
documentSelector: fullySupportedTypes,
|
||||
legend: semanticTokenLegend.lspLegend,
|
||||
full: Either2<bool, SemanticTokensOptionsFull>.t2(
|
||||
SemanticTokensOptionsFull(delta: false),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
await _applyRegistrations(registrations);
|
||||
}
|
||||
|
|
|
@ -131,6 +131,7 @@ class InitializationTest extends AbstractLspAnalysisServerTest {
|
|||
expect(initResult.capabilities.foldingRangeProvider, isNotNull);
|
||||
expect(initResult.capabilities.workspace.fileOperations.willRename,
|
||||
equals(ServerCapabilitiesComputer.fileOperationRegistrationOptions));
|
||||
expect(initResult.capabilities.semanticTokensProvider, isNotNull);
|
||||
|
||||
expect(didGetRegisterCapabilityRequest, isFalse);
|
||||
}
|
||||
|
@ -186,6 +187,7 @@ class InitializationTest extends AbstractLspAnalysisServerTest {
|
|||
expect(initResult.capabilities.renameProvider, isNull);
|
||||
expect(initResult.capabilities.foldingRangeProvider, isNull);
|
||||
expect(initResult.capabilities.workspace.fileOperations, isNull);
|
||||
expect(initResult.capabilities.semanticTokensProvider, isNull);
|
||||
|
||||
// Ensure all expected dynamic registrations.
|
||||
for (final expectedRegistration in ClientDynamicRegistrations.supported) {
|
||||
|
|
580
pkg/analysis_server/test/lsp/semantic_tokens_test.dart
Normal file
580
pkg/analysis_server/test/lsp/semantic_tokens_test.dart
Normal file
|
@ -0,0 +1,580 @@
|
|||
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
import 'package:analysis_server/lsp_protocol/protocol_generated.dart';
|
||||
import 'package:analysis_server/src/lsp/constants.dart';
|
||||
import 'package:analysis_server/src/lsp/semantic_tokens/legend.dart';
|
||||
import 'package:analysis_server/src/protocol/protocol_internal.dart';
|
||||
import 'package:analyzer_plugin/protocol/protocol_common.dart' as plugin;
|
||||
import 'package:analyzer_plugin/protocol/protocol_generated.dart' as plugin;
|
||||
import 'package:test/test.dart';
|
||||
import 'package:test_reflective_loader/test_reflective_loader.dart';
|
||||
|
||||
import 'server_abstract.dart';
|
||||
|
||||
void main() {
|
||||
defineReflectiveSuite(() {
|
||||
defineReflectiveTests(SemanticTokensTest);
|
||||
});
|
||||
}
|
||||
|
||||
@reflectiveTest
|
||||
class SemanticTokensTest extends AbstractLspAnalysisServerTest {
|
||||
/// Decode tokens according to the LSP spec and pair with relevant file contents.
|
||||
List<_Token> decodeSemanticTokens(String content, SemanticTokens tokens) {
|
||||
final contentLines = content.split('\n').map((line) => '$line\n').toList();
|
||||
final results = <_Token>[];
|
||||
|
||||
var lastLine = 0;
|
||||
var lastColumn = 0;
|
||||
for (var i = 0; i < tokens.data.length; i += 5) {
|
||||
final lineDelta = tokens.data[i];
|
||||
final columnDelta = tokens.data[i + 1];
|
||||
final length = tokens.data[i + 2];
|
||||
final tokenTypeIndex = tokens.data[i + 3];
|
||||
final modifierBitmask = tokens.data[i + 4];
|
||||
|
||||
// Calculate the actual line/col from the deltas.
|
||||
final line = lastLine + lineDelta;
|
||||
final column = lineDelta == 0 ? lastColumn + columnDelta : columnDelta;
|
||||
|
||||
final tokenContent =
|
||||
contentLines[line].substring(column, column + length);
|
||||
results.add(_Token(
|
||||
tokenContent,
|
||||
semanticTokenLegend.typeForIndex(tokenTypeIndex),
|
||||
semanticTokenLegend.modifiersForBitmask(modifierBitmask),
|
||||
));
|
||||
|
||||
lastLine = line;
|
||||
lastColumn = column;
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
Future<void> test_class() async {
|
||||
final content = '''
|
||||
/// class docs
|
||||
class MyClass<T> {
|
||||
// class comment
|
||||
}
|
||||
|
||||
// Trailing comment
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('/// class docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('T', SemanticTokenTypes.typeParameter),
|
||||
_Token('// class comment', SemanticTokenTypes.comment),
|
||||
_Token('// Trailing comment', SemanticTokenTypes.comment),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_class_fields() async {
|
||||
final content = '''
|
||||
class MyClass {
|
||||
/// field docs
|
||||
String myField = 'FieldVal';
|
||||
/// static field docs
|
||||
static String myStaticField = 'StaticFieldVal';
|
||||
}
|
||||
|
||||
main() {
|
||||
final a = MyClass();
|
||||
print(a.myField);
|
||||
MyClass.myStaticField = 'a';
|
||||
}
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('/// field docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('myField', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token("'FieldVal'", SemanticTokenTypes.string),
|
||||
_Token('/// static field docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('static', SemanticTokenTypes.keyword),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('myStaticField', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token("'StaticFieldVal'", SemanticTokenTypes.string),
|
||||
_Token('main', SemanticTokenTypes.function,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('final', SemanticTokenTypes.keyword),
|
||||
_Token('a', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('print', SemanticTokenTypes.function),
|
||||
_Token('a', SemanticTokenTypes.variable),
|
||||
_Token('myField', SemanticTokenTypes.property),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('myStaticField', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.static]),
|
||||
_Token("'a'", SemanticTokenTypes.string),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_class_getterSetter() async {
|
||||
final content = '''
|
||||
class MyClass {
|
||||
/// getter docs
|
||||
String get myGetter => 'GetterVal';
|
||||
/// setter docs
|
||||
set mySetter(String v) {};
|
||||
/// static getter docs
|
||||
static String get myStaticGetter => 'StaticGetterVal';
|
||||
/// static setter docs
|
||||
static set myStaticSetter(String staticV) {};
|
||||
}
|
||||
|
||||
main() {
|
||||
final a = MyClass();
|
||||
print(a.myGetter);
|
||||
a.mySetter = 'a';
|
||||
}
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('/// getter docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('get', SemanticTokenTypes.keyword),
|
||||
_Token('myGetter', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token("'GetterVal'", SemanticTokenTypes.string),
|
||||
_Token('/// setter docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('set', SemanticTokenTypes.keyword),
|
||||
_Token('mySetter', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('v', SemanticTokenTypes.parameter,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('/// static getter docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('static', SemanticTokenTypes.keyword),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('get', SemanticTokenTypes.keyword),
|
||||
_Token('myStaticGetter', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token("'StaticGetterVal'", SemanticTokenTypes.string),
|
||||
_Token('/// static setter docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('static', SemanticTokenTypes.keyword),
|
||||
_Token('set', SemanticTokenTypes.keyword),
|
||||
_Token('myStaticSetter', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('staticV', SemanticTokenTypes.parameter,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('main', SemanticTokenTypes.function,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('final', SemanticTokenTypes.keyword),
|
||||
_Token('a', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('print', SemanticTokenTypes.function),
|
||||
_Token('a', SemanticTokenTypes.variable),
|
||||
_Token('myGetter', SemanticTokenTypes.property),
|
||||
_Token('a', SemanticTokenTypes.variable),
|
||||
_Token('mySetter', SemanticTokenTypes.property),
|
||||
_Token("'a'", SemanticTokenTypes.string),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_class_method() async {
|
||||
final content = '''
|
||||
class MyClass {
|
||||
/// method docs
|
||||
@override
|
||||
void myMethod() {}
|
||||
/// static method docs
|
||||
static void myStaticMethod() {
|
||||
// static method comment
|
||||
}
|
||||
}
|
||||
|
||||
main() {
|
||||
final a = MyClass();
|
||||
a.myMethod();
|
||||
MyClass.myStaticMethod();
|
||||
}
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('/// method docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('@', CustomSemanticTokenTypes.annotation),
|
||||
_Token('override', SemanticTokenTypes.property),
|
||||
_Token('void', SemanticTokenTypes.keyword),
|
||||
_Token('myMethod', SemanticTokenTypes.method,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('/// static method docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('static', SemanticTokenTypes.keyword),
|
||||
_Token('void', SemanticTokenTypes.keyword),
|
||||
_Token('myStaticMethod', SemanticTokenTypes.method,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('// static method comment', SemanticTokenTypes.comment),
|
||||
_Token('main', SemanticTokenTypes.function,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('final', SemanticTokenTypes.keyword),
|
||||
_Token('a', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('a', SemanticTokenTypes.variable),
|
||||
_Token('myMethod', SemanticTokenTypes.method),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('myStaticMethod', SemanticTokenTypes.method,
|
||||
[SemanticTokenModifiers.static]),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_directives() async {
|
||||
final content = '''
|
||||
import 'package:flutter/material.dart';
|
||||
export 'package:flutter/widgets.dart';
|
||||
import '../file.dart';
|
||||
|
||||
library foo;
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('import', SemanticTokenTypes.keyword),
|
||||
_Token("'package:flutter/material.dart'", SemanticTokenTypes.string),
|
||||
_Token('export', SemanticTokenTypes.keyword),
|
||||
_Token("'package:flutter/widgets.dart'", SemanticTokenTypes.string),
|
||||
_Token('import', SemanticTokenTypes.keyword),
|
||||
_Token("'../file.dart'", SemanticTokenTypes.string),
|
||||
_Token('library', SemanticTokenTypes.keyword),
|
||||
_Token('foo', SemanticTokenTypes.namespace),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_fromPlugin() async {
|
||||
final pluginAnalyzedFilePath = join(projectFolderPath, 'lib', 'foo.foo');
|
||||
final pluginAnalyzedFileUri = Uri.file(pluginAnalyzedFilePath);
|
||||
final content = 'CLASS STRING VARIABLE';
|
||||
|
||||
final expected = [
|
||||
_Token('CLASS', SemanticTokenTypes.class_),
|
||||
_Token('STRING', SemanticTokenTypes.string),
|
||||
_Token('VARIABLE', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(pluginAnalyzedFileUri, withoutMarkers(content));
|
||||
|
||||
final pluginResult = plugin.AnalysisHighlightsParams(
|
||||
pluginAnalyzedFilePath,
|
||||
[
|
||||
plugin.HighlightRegion(plugin.HighlightRegionType.CLASS, 0, 5),
|
||||
plugin.HighlightRegion(plugin.HighlightRegionType.LITERAL_STRING, 6, 6),
|
||||
plugin.HighlightRegion(
|
||||
plugin.HighlightRegionType.TOP_LEVEL_VARIABLE_DECLARATION, 13, 8),
|
||||
],
|
||||
);
|
||||
configureTestPlugin(notification: pluginResult.toNotification());
|
||||
|
||||
final tokens = await getSemanticTokens(pluginAnalyzedFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_invalidSyntax() async {
|
||||
final content = '''
|
||||
/// class docs
|
||||
class MyClass {
|
||||
// class comment
|
||||
}
|
||||
|
||||
this is not valid code.
|
||||
|
||||
/// class docs 2
|
||||
class MyClass2 {
|
||||
// class comment 2
|
||||
}
|
||||
''';
|
||||
|
||||
// Expect the correct tokens for the valid code before/after but don't
|
||||
// check the tokens for the invalid code as there are no concrete
|
||||
// expectations for them.
|
||||
final expected1 = [
|
||||
_Token('/// class docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
_Token('// class comment', SemanticTokenTypes.comment),
|
||||
];
|
||||
final expected2 = [
|
||||
_Token('/// class docs 2', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass2', SemanticTokenTypes.class_),
|
||||
_Token('// class comment 2', SemanticTokenTypes.comment),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
|
||||
// Remove the tokens between the two expected sets.
|
||||
decoded.removeRange(expected1.length, decoded.length - expected2.length);
|
||||
|
||||
expect(decoded, equals([...expected1, ...expected2]));
|
||||
}
|
||||
|
||||
Future<void> test_lastLine_code() async {
|
||||
final content = 'String var;';
|
||||
|
||||
final expected = [
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('var', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_lastLine_comment() async {
|
||||
final content = '// Trailing comment';
|
||||
|
||||
final expected = [
|
||||
_Token('// Trailing comment', SemanticTokenTypes.comment),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_lastLine_multilineComment() async {
|
||||
final content = '''/**
|
||||
* Trailing comment
|
||||
*/''';
|
||||
|
||||
final expected = [
|
||||
_Token('/**', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('* Trailing comment', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('*/', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_multilineRegions() async {
|
||||
final content = '''
|
||||
/**
|
||||
* This is my class comment
|
||||
*
|
||||
* There are
|
||||
* multiple lines
|
||||
*/
|
||||
class MyClass {}
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('/**', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('* This is my class comment', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('*', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('* There are', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('* multiple lines', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('*/', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('class', SemanticTokenTypes.keyword),
|
||||
_Token('MyClass', SemanticTokenTypes.class_),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_strings() async {
|
||||
final content = r'''
|
||||
const string1 = 'test';
|
||||
const string2 = '$string1 ${string1.length}';
|
||||
const string3 = r'$string1 ${string1.length}';
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('const', SemanticTokenTypes.keyword),
|
||||
_Token('string1', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token("'test'", SemanticTokenTypes.string),
|
||||
_Token('const', SemanticTokenTypes.keyword),
|
||||
_Token('string2', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
// TODO(dantup): Should we expect String tokens for the non-interpolated
|
||||
// parts of interpolated strings? Currently they do not produce highlight
|
||||
// regions so they do not come through.
|
||||
// _Token(r"'$", SemanticTokenTypes.string),
|
||||
_Token('string1', SemanticTokenTypes.property),
|
||||
// _Token(r'${', SemanticTokenTypes.string),
|
||||
_Token('string1', SemanticTokenTypes.property),
|
||||
// _Token('.', SemanticTokenTypes.string),
|
||||
_Token('length', SemanticTokenTypes.property),
|
||||
// _Token("}'", SemanticTokenTypes.string),
|
||||
_Token('const', SemanticTokenTypes.keyword),
|
||||
_Token('string3', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token(r"r'$string1 ${string1.length}'", SemanticTokenTypes.string),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
|
||||
Future<void> test_topLevel() async {
|
||||
final content = '''
|
||||
/// strings docs
|
||||
const strings = <String>["test", 'test', r'test', \'''test\'''];
|
||||
|
||||
/// func docs
|
||||
func(String a) => print(a);
|
||||
|
||||
/// abc docs
|
||||
bool get abc => true;
|
||||
''';
|
||||
|
||||
final expected = [
|
||||
_Token('/// strings docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('const', SemanticTokenTypes.keyword),
|
||||
_Token('strings', SemanticTokenTypes.variable,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('"test"', SemanticTokenTypes.string),
|
||||
_Token("'test'", SemanticTokenTypes.string),
|
||||
_Token("r'test'", SemanticTokenTypes.string),
|
||||
_Token("'''test'''", SemanticTokenTypes.string),
|
||||
_Token('/// func docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('func', SemanticTokenTypes.function,
|
||||
[SemanticTokenModifiers.declaration, SemanticTokenModifiers.static]),
|
||||
_Token('String', SemanticTokenTypes.class_),
|
||||
_Token('a', SemanticTokenTypes.parameter,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('print', SemanticTokenTypes.function),
|
||||
_Token('a', SemanticTokenTypes.parameter),
|
||||
_Token('/// abc docs', SemanticTokenTypes.comment,
|
||||
[SemanticTokenModifiers.documentation]),
|
||||
_Token('bool', SemanticTokenTypes.class_),
|
||||
_Token('get', SemanticTokenTypes.keyword),
|
||||
_Token('abc', SemanticTokenTypes.property,
|
||||
[SemanticTokenModifiers.declaration]),
|
||||
_Token('true', CustomSemanticTokenTypes.boolean),
|
||||
];
|
||||
|
||||
await initialize();
|
||||
await openFile(mainFileUri, withoutMarkers(content));
|
||||
|
||||
final tokens = await getSemanticTokens(mainFileUri);
|
||||
final decoded = decodeSemanticTokens(content, tokens);
|
||||
expect(decoded, equals(expected));
|
||||
}
|
||||
}
|
||||
|
||||
class _Token {
|
||||
final String content;
|
||||
final SemanticTokenTypes type;
|
||||
final List<SemanticTokenModifiers> modifiers;
|
||||
|
||||
_Token(this.content, this.type, [this.modifiers = const []]);
|
||||
|
||||
@override
|
||||
int get hashCode => content.hashCode;
|
||||
|
||||
@override
|
||||
bool operator ==(Object o) =>
|
||||
o is _Token &&
|
||||
o.content == content &&
|
||||
o.type == type &&
|
||||
listEqual(
|
||||
// Treat nulls the same as empty lists for convenience when comparing.
|
||||
o.modifiers ?? <SemanticTokenModifiers>[],
|
||||
modifiers ?? <SemanticTokenModifiers>[],
|
||||
(SemanticTokenModifiers a, SemanticTokenModifiers b) => a == b);
|
||||
|
||||
@override
|
||||
String toString() => '$content (${[type, ...?modifiers]})';
|
||||
}
|
|
@ -222,6 +222,12 @@ mixin ClientCapabilitiesHelperMixin {
|
|||
'codeAction': {'dynamicRegistration': true},
|
||||
'rename': {'dynamicRegistration': true},
|
||||
'foldingRange': {'dynamicRegistration': true},
|
||||
'semanticTokens': SemanticTokensClientCapabilities(
|
||||
dynamicRegistration: true,
|
||||
requests: SemanticTokensClientCapabilitiesRequests(),
|
||||
formats: [],
|
||||
tokenModifiers: [],
|
||||
tokenTypes: []).toJson(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1014,6 +1020,16 @@ mixin LspAnalysisServerTestMixin implements ClientCapabilitiesHelperMixin {
|
|||
request, _fromJsonList(Location.fromJson));
|
||||
}
|
||||
|
||||
Future<SemanticTokens> getSemanticTokens(Uri uri) {
|
||||
final request = makeRequest(
|
||||
Method.textDocument_semanticTokens_full,
|
||||
SemanticTokensParams(
|
||||
textDocument: TextDocumentIdentifier(uri: uri.toString()),
|
||||
),
|
||||
);
|
||||
return expectSuccessfulResponseTo(request, SemanticTokens.fromJson);
|
||||
}
|
||||
|
||||
Future<SignatureHelp> getSignatureHelp(Uri uri, Position pos,
|
||||
[SignatureHelpContext context]) {
|
||||
final request = makeRequest(
|
||||
|
|
|
@ -34,6 +34,7 @@ import 'priority_files_test.dart' as priority_files;
|
|||
import 'reanalyze_test.dart' as reanalyze;
|
||||
import 'references_test.dart' as references;
|
||||
import 'rename_test.dart' as rename;
|
||||
import 'semantic_tokens_test.dart' as semantic_tokens;
|
||||
import 'server_test.dart' as server;
|
||||
import 'signature_help_test.dart' as signature_help;
|
||||
import 'super_test.dart' as get_super;
|
||||
|
@ -73,6 +74,7 @@ void main() {
|
|||
reanalyze.main();
|
||||
references.main();
|
||||
rename.main();
|
||||
semantic_tokens.main();
|
||||
server.main();
|
||||
signature_help.main();
|
||||
will_rename_files.main();
|
||||
|
|
|
@ -102,6 +102,7 @@ Below is a list of LSP methods and their implementation status.
|
|||
| textDocument/rename | ✅ | ✅ | | ✅ | ✅ |
|
||||
| textDocument/prepareRename | ✅ | ✅ | | ✅ | ✅ |
|
||||
| textDocument/foldingRange | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||
| textDocument/semanticTokens/full | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||
|
||||
## Custom Methods and Notifications
|
||||
|
||||
|
|
Loading…
Reference in a new issue