This commit is contained in:
Martin Aeschlimann 2019-12-17 17:07:51 +01:00
parent 2ca09d93aa
commit 8bbe5ce092
4 changed files with 125 additions and 25 deletions

11
.vscode/launch.json vendored
View file

@ -299,5 +299,16 @@
"Run Unit Tests"
]
},
{
"type": "node",
"request": "launch",
"name": "HTML Unit Tests",
"program": "${workspaceFolder}/extensions/html-language-features/server/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceFolder}/extensions/html-language-features/server",
"outFiles": [
"${workspaceFolder}/extensions/html-language-features/server/out/**/*.js"
]
},
]
}

View file

@ -12,30 +12,29 @@ export function getSemanticTokens(jsLanguageService: ts.LanguageService, current
//https://ts-ast-viewer.com/#code/AQ0g2CmAuwGbALzAJwG4BQZQGNwEMBnQ4AQQEYBmYAb2C22zgEtJwATJVTRxgcwD27AQAp8AGmAAjAJS0A9POB8+7NQ168oscAJz5wANXwAnLug2bsJmAFcTAO2XAA1MHyvgu-UdOeWbOw8ViAAvpagocBAA
let resultTokens: SemanticTokenData[] = [];
const tokens = jsLanguageService.getSemanticClassifications(fileName, { start: 0, length: currentTextDocument.getText().length });
for (let token of tokens) {
const typeIdx = tokenFromClassificationMapping[token.classificationType];
if (typeIdx !== undefined) {
resultTokens.push({ offset: token.textSpan.start, length: token.textSpan.length, typeIdx, modifierSet: 0 });
}
}
// const tokens = jsLanguageService.getSemanticClassifications(fileName, { start: 0, length: currentTextDocument.getText().length });
// for (let token of tokens) {
// const typeIdx = tokenFromClassificationMapping[token.classificationType];
// if (typeIdx !== undefined) {
// resultTokens.push({ offset: token.textSpan.start, length: token.textSpan.length, typeIdx, modifierSet: 0 });
// }
// }
const program = jsLanguageService.getProgram();
if (program) {
const typeChecker = program.getTypeChecker();
function visit(node: ts.Node) {
if (node.kind === ts.SyntaxKind.Identifier) {
const symbol = typeChecker.getSymbolAtLocation(node);
if (symbol) {
let typeIdx = tokenFromDeclarationMapping[symbol.valueDeclaration.kind];
let modifierSet = 0;
if (symbol.valueDeclaration === node.parent) {
if (node.parent && (<ts.NamedDeclaration>node.parent).name === node) {
modifierSet = TokenModifier.declaration;
}
if (typeIdx !== undefined) {
resultTokens.push({ offset: node.pos, length: node.end - node.pos, typeIdx, modifierSet });
resultTokens.push({ offset: node.getStart(), length: node.getWidth(), typeIdx, modifierSet });
}
}
}
@ -91,7 +90,7 @@ export function getSemanticTokenLegend() {
}
const tokenTypes: string[] = ['class', 'enum', 'interface', 'namespace', 'parameterType', 'type', 'parameter', 'variable', 'property', 'constant', 'function'];
const tokenTypes: string[] = ['class', 'enum', 'interface', 'namespace', 'parameterType', 'type', 'parameter', 'variable', 'property', 'constant', 'function', 'member'];
const tokenModifiers: string[] = ['declaration',];
enum TokenType {
@ -106,22 +105,22 @@ enum TokenType {
'property' = 8,
'constant' = 9,
'function' = 10,
'member' = 11
}
enum TokenModifier {
'declaration' = 0x01,
'declaration' = 0x01
}
const tokenFromClassificationMapping: { [name: string]: TokenType } = {
[ts.ClassificationTypeNames.className]: TokenType.class,
[ts.ClassificationTypeNames.enumName]: TokenType.enum,
[ts.ClassificationTypeNames.interfaceName]: TokenType.interface,
[ts.ClassificationTypeNames.moduleName]: TokenType.namespace,
[ts.ClassificationTypeNames.typeParameterName]: TokenType.parameterType,
[ts.ClassificationTypeNames.typeAliasName]: TokenType.type,
[ts.ClassificationTypeNames.parameterName]: TokenType.parameter
};
// const tokenFromClassificationMapping: { [name: string]: TokenType } = {
// [ts.ClassificationTypeNames.className]: TokenType.class,
// [ts.ClassificationTypeNames.enumName]: TokenType.enum,
// [ts.ClassificationTypeNames.interfaceName]: TokenType.interface,
// [ts.ClassificationTypeNames.moduleName]: TokenType.namespace,
// [ts.ClassificationTypeNames.typeParameterName]: TokenType.parameterType,
// [ts.ClassificationTypeNames.typeAliasName]: TokenType.type,
// [ts.ClassificationTypeNames.parameterName]: TokenType.parameter
// };
const tokenFromDeclarationMapping: { [name: string]: TokenType } = {
[ts.SyntaxKind.VariableDeclaration]: TokenType.variable,

View file

@ -5,9 +5,8 @@
import 'mocha';
import * as assert from 'assert';
import { TextDocument } from 'vscode-html-languageservice';
import { getFoldingRanges } from '../modes/htmlFolding';
import { getLanguageModes } from '../modes/languageModes';
import { TextDocument, getLanguageModes } from '../modes/languageModes';
import { ClientCapabilities } from 'vscode-css-languageservice';
interface ExpectedIndentRange {
@ -17,7 +16,7 @@ interface ExpectedIndentRange {
}
function assertRanges(lines: string[], expected: ExpectedIndentRange[], message?: string, nRanges?: number): void {
const document = TextDocument.create('test://foo/bar.json', 'json', 1, lines.join('\n'));
const document = TextDocument.create('test://foo/bar.html', 'html', 1, lines.join('\n'));
const workspace = {
settings: {},
folders: [{ name: 'foo', uri: 'test://foo' }]

View file

@ -0,0 +1,91 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'mocha';
import * as assert from 'assert';
import { TextDocument, getLanguageModes, ClientCapabilities, Range, Position } from '../modes/languageModes';
/**
 * One expected semantic token for {@link assertTokens}.
 * `startLine`/`character` are 0-based document coordinates, `length` is the
 * token length in characters, and `tokenClassification` is the token type
 * joined with any modifier names by '.', e.g. 'variable.declaration'.
 */
interface ExpectedToken {
	startLine: number;
	character: number;
	length: number;
	tokenClassification: string;
}

/**
 * Runs the JavaScript mode's semantic token provider over an HTML document
 * built from `lines`, decodes the relative-encoded integer stream back into
 * absolute positions/classifications and compares them with `expected`.
 *
 * @param lines document content; joined with '\n'
 * @param expected tokens in document order (the delta encoding is order-dependent)
 * @param range optional range to request tokens for; defaults to the whole document
 * @param message optional assertion failure message
 */
function assertTokens(lines: string[], expected: ExpectedToken[], range?: Range, message?: string): void {
	const document = TextDocument.create('test://foo/bar.html', 'html', 1, lines.join('\n'));
	const workspace = {
		settings: {},
		folders: [{ name: 'foo', uri: 'test://foo' }]
	};
	const languageModes = getLanguageModes({ css: true, javascript: true }, workspace, ClientCapabilities.LATEST);
	if (!range) {
		range = Range.create(Position.create(0, 0), document.positionAt(document.getText().length));
	}
	const jsMode = languageModes.getMode('javascript')!;
	const legend = jsMode.getSemanticTokenLegend!();
	const actual = jsMode.getSemanticTokens!(document, [range]);

	// Decode the LSP-style relative encoding: each token is 5 integers
	// (deltaLine, deltaStartChar, length, tokenTypeIndex, tokenModifierSet).
	const actualRanges: ExpectedToken[] = [];
	let lastLine = 0;
	let lastCharacter = 0;
	for (let i = 0; i < actual.length; i += 5) {
		const lineDelta = actual[i], charDelta = actual[i + 1], len = actual[i + 2], typeIdx = actual[i + 3], modSet = actual[i + 4];
		const line = lastLine + lineDelta;
		// deltaStartChar is relative to the previous token only when both are on the same line.
		const character = lineDelta === 0 ? lastCharacter + charDelta : charDelta;
		// Expand the modifier bit set into '.'-separated names (use a distinct
		// name for the filter index so it does not shadow the loop counter).
		const tokenClassification = [legend.types[typeIdx], ...legend.modifiers.filter((_, bit) => modSet & (1 << bit))].join('.');
		actualRanges.push(t(line, character, len, tokenClassification));
		lastLine = line;
		lastCharacter = character;
	}
	assert.deepStrictEqual(actualRanges, expected, message);
}

/** Shorthand constructor for an {@link ExpectedToken}. */
function t(startLine: number, character: number, length: number, tokenClassification: string): ExpectedToken {
	return { startLine, character, length, tokenClassification };
}
// End-to-end checks: JavaScript embedded in an HTML <script> tag is classified
// by the JavaScript language mode; expected positions use the 0-based line
// numbers shown in the /*n*/ markers.
suite('JavaScript Semantic Tokens', () => {

	test('variables', () => {
		const input = [
			/*0*/'<html>',
			/*1*/'<head>',
			/*2*/'<script>',
			/*3*/'  var x = 9, y1 = x;',
			/*4*/'  throw y1;',
			/*5*/'</script>',
			/*6*/'</head>',
			/*7*/'</html>',
		];
		// Declarations carry the 'declaration' modifier; later reads do not.
		assertTokens(input, [
			t(3, 6, 1, 'variable.declaration'), t(3, 13, 2, 'variable.declaration'), t(3, 18, 1, 'variable'),
			t(4, 8, 2, 'variable')
		]);
	});

	test('function', () => {
		const input = [
			/*0*/'<html>',
			/*1*/'<head>',
			/*2*/'<script>',
			/*3*/'  function foo(p1) {',
			/*4*/'    return foo(Math.abs(p1))',
			/*5*/'  }',
			/*6*/'</script>',
			/*7*/'</head>',
			/*8*/'</html>',
		];
		// 'Math' classifies as a namespace and 'abs' as a member access.
		assertTokens(input, [
			t(3, 11, 3, 'function.declaration'), t(3, 15, 2, 'parameter.declaration'),
			t(4, 11, 3, 'function'), t(4, 15, 3, 'namespace'), t(4, 20, 3, 'member'), t(4, 24, 2, 'parameter')
		]);
	});
});