Remove areas from the semantic tokens API

This commit is contained in:
Alex Dima 2019-12-02 10:31:54 +01:00
parent 9995919f55
commit d6dae16f40
No known key found for this signature in database
GPG key ID: 6E58D7B045760DA0
12 changed files with 544 additions and 986 deletions

View file

@ -10,34 +10,19 @@ export function activate(context: vscode.ExtensionContext): any {
const tokenModifiers = ['static', 'abstract', 'deprecated'];
const tokenTypes = ['strings', 'types', 'structs', 'classes', 'functions', 'variables'];
const legend = new vscode.SemanticColoringLegend(tokenTypes, tokenModifiers);
const legend = new vscode.SemanticTokensLegend(tokenTypes, tokenModifiers);
/*
* A certain token (at index `i` is encoded using 5 uint32 integers):
* - at index `5*i` - `deltaLine`: token line number, relative to `SemanticColoringArea.line`
* - at index `5*i+1` - `startCharacter`: token start character offset inside the line (inclusive)
* - at index `5*i+2` - `endCharacter`: token end character offset inside the line (exclusive)
* - at index `5*i+3` - `tokenType`: will be looked up in `SemanticColoringLegend.tokenTypes`
* - at index `5*i+4` - `tokenModifiers`: each set bit will be looked up in `SemanticColoringLegend.tokenModifiers`
*/
const semanticHighlightProvider: vscode.SemanticColoringProvider = {
provideSemanticColoring(document: vscode.TextDocument): vscode.ProviderResult<vscode.SemanticColoring> {
const result: number[] = [];
const semanticHighlightProvider: vscode.SemanticTokensProvider = {
provideSemanticTokens(document: vscode.TextDocument): vscode.ProviderResult<vscode.SemanticTokens> {
const builder = new vscode.SemanticTokensBuilder();
const visitor: jsoncParser.JSONVisitor = {
onObjectProperty: (property: string, _offset: number, length: number, startLine: number, startCharacter: number) => {
result.push(startLine);
result.push(startCharacter);
result.push(startCharacter + length);
const [type, ...modifiers] = property.split('.');
let tokenType = legend.tokenTypes.indexOf(type);
if (tokenType === -1) {
tokenType = 0;
}
result.push(tokenType);
let tokenModifiers = 0;
for (let i = 0; i < modifiers.length; i++) {
@ -46,15 +31,17 @@ export function activate(context: vscode.ExtensionContext): any {
tokenModifiers = tokenModifiers | 1 << index;
}
}
result.push(tokenModifiers);
builder.push(startLine, startCharacter, length, tokenType, tokenModifiers);
}
};
jsoncParser.visit(document.getText(), visitor);
return new vscode.SemanticColoring([new vscode.SemanticColoringArea(0, new Uint32Array(result))]);
return new vscode.SemanticTokens(builder.build());
}
};
context.subscriptions.push(vscode.languages.registerSemanticColoringProvider({ pattern: '**/color-test.json' }, semanticHighlightProvider, legend));
context.subscriptions.push(vscode.languages.registerSemanticTokensProvider({ pattern: '**/color-test.json' }, semanticHighlightProvider, legend));
}

View file

@ -1465,31 +1465,31 @@ export interface CodeLensProvider {
resolveCodeLens?(model: model.ITextModel, codeLens: CodeLens, token: CancellationToken): ProviderResult<CodeLens>;
}
export interface SemanticColoringLegend {
/**
 * Maps the numeric token type / token modifier indices used in encoded
 * semantic tokens back to human-readable names.
 */
export interface SemanticTokensLegend {
// Index into this array = the `tokenType` value encoded in the token data.
readonly tokenTypes: string[];
// Each set bit i in an encoded `tokenModifiers` value refers to tokenModifiers[i].
readonly tokenModifiers: string[];
}
export interface SemanticColoringArea {
/**
* The zero-based line value where this token block begins.
*/
readonly line: number;
/**
* The actual token block encoded data.
*/
/**
 * A full set of semantic tokens for a model.
 * `data` is encoded as 5 uint32s per token (the consumer divides its
 * length by 5 to obtain the token count).
 */
export interface SemanticTokens {
// Optional id a provider can use to answer a later request with a delta
// (see `SemanticTokensEdits`); released via `releaseSemanticTokens`.
readonly resultId?: string;
readonly data: Uint32Array;
}
export interface SemanticColoring {
readonly areas: SemanticColoringArea[];
dispose(): void;
/**
 * A single splice against a previous result's `data` array:
 * delete `deleteCount` uint32s at offset `start`, then insert `data`
 * (if present) at that position.
 */
export interface SemanticTokensEdit {
readonly start: number;
readonly deleteCount: number;
readonly data?: Uint32Array;
}
export interface SemanticColoringProvider {
getLegend(): SemanticColoringLegend;
provideSemanticColoring(model: model.ITextModel, token: CancellationToken): ProviderResult<SemanticColoring>;
/**
 * A delta response: a list of edits to apply to the previously returned
 * token data (identified by the `lastResultId` the consumer passed in).
 */
export interface SemanticTokensEdits {
readonly resultId?: string;
readonly edits: SemanticTokensEdit[];
}
/**
 * Contract for computing semantic tokens for a text model.
 */
export interface SemanticTokensProvider {
// Names for the token type / modifier indices used in results.
getLegend(): SemanticTokensLegend;
// May return a full token set, or — when `lastResultId` identifies a
// previous result the provider still holds — a `SemanticTokensEdits`
// delta against it. `ranges`, when non-null, restricts the request.
provideSemanticTokens(model: model.ITextModel, lastResultId: string | null, ranges: Range[] | null, token: CancellationToken): ProviderResult<SemanticTokens | SemanticTokensEdits>;
// Signals that the result identified by `resultId` is no longer
// referenced and any provider-side cache for it can be freed.
releaseSemanticTokens(resultId: string | undefined): void;
}
// --- feature registries ------
@ -1597,7 +1597,7 @@ export const FoldingRangeProviderRegistry = new LanguageFeatureRegistry<FoldingR
/**
* @internal
*/
export const SemanticColoringProviderRegistry = new LanguageFeatureRegistry<SemanticColoringProvider>();
export const SemanticTokensProviderRegistry = new LanguageFeatureRegistry<SemanticTokensProvider>();
/**
* @internal

View file

@ -14,7 +14,7 @@ import { Range } from 'vs/editor/common/core/range';
import { DefaultEndOfLine, EndOfLinePreference, EndOfLineSequence, IIdentifiedSingleEditOperation, ITextBuffer, ITextBufferFactory, ITextModel, ITextModelCreationOptions } from 'vs/editor/common/model';
import { TextModel, createTextBuffer } from 'vs/editor/common/model/textModel';
import { IModelLanguageChangedEvent, IModelContentChangedEvent } from 'vs/editor/common/model/textModelEvents';
import { LanguageIdentifier, SemanticColoringProviderRegistry, SemanticColoringProvider, SemanticColoring, SemanticColoringLegend } from 'vs/editor/common/modes';
import { LanguageIdentifier, SemanticTokensProviderRegistry, SemanticTokensProvider, SemanticTokensLegend, SemanticTokens, SemanticTokensEdits } from 'vs/editor/common/modes';
import { PLAINTEXT_LANGUAGE_IDENTIFIER } from 'vs/editor/common/modes/modesRegistry';
import { ILanguageSelection } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
@ -459,22 +459,22 @@ class SemanticColoringFeature extends Disposable {
class SemanticStyling extends Disposable {
private _caches: WeakMap<SemanticColoringProvider, SemanticColoringProviderStyling>;
private _caches: WeakMap<SemanticTokensProvider, SemanticColoringProviderStyling>;
constructor(
private readonly _themeService: IThemeService
) {
super();
this._caches = new WeakMap<SemanticColoringProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<SemanticTokensProvider, SemanticColoringProviderStyling>();
if (this._themeService) {
// workaround for tests which use undefined... :/
this._register(this._themeService.onThemeChange(() => {
this._caches = new WeakMap<SemanticColoringProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<SemanticTokensProvider, SemanticColoringProviderStyling>();
}));
}
}
public get(provider: SemanticColoringProvider): SemanticColoringProviderStyling {
public get(provider: SemanticTokensProvider): SemanticColoringProviderStyling {
if (!this._caches.has(provider)) {
this._caches.set(provider, new SemanticColoringProviderStyling(provider.getLegend(), this._themeService));
}
@ -580,7 +580,7 @@ class SemanticColoringProviderStyling {
private readonly _hashTable: HashTable;
constructor(
private readonly _legend: SemanticColoringLegend,
private readonly _legend: SemanticTokensLegend,
private readonly _themeService: IThemeService
) {
this._hashTable = new HashTable();
@ -611,13 +611,39 @@ class SemanticColoringProviderStyling {
}
}
// Tuning knobs for splitting one large incoming token array into multiple
// MultilineTokens2 areas when applying a full SemanticTokens response.
const enum SemanticColoringConstants {
/**
 * Let's aim at having 8KB buffers if possible...
 * So that would be 8192 / (5 * 4) = 409.6 tokens per area
 */
DesiredTokensPerArea = 400,
/**
 * Try to keep the total number of areas under 1024 if possible,
 * simply compensate by having more tokens per area...
 */
DesiredMaxAreas = 1024,
}
/**
 * Holds on to the most recently received semantic tokens result so that
 * the provider can be told to release it once it is no longer needed.
 */
class SemanticTokensResponse {

	public readonly resultId: string | undefined;
	public readonly data: Uint32Array;
	private readonly _provider: SemanticTokensProvider;

	constructor(provider: SemanticTokensProvider, resultId: string | undefined, data: Uint32Array) {
		this._provider = provider;
		this.resultId = resultId;
		this.data = data;
	}

	/** Hands the result id back to the owning provider for cleanup. */
	public dispose(): void {
		this._provider.releaseSemanticTokens(this.resultId);
	}
}
class ModelSemanticColoring extends Disposable {
private _isDisposed: boolean;
private readonly _model: ITextModel;
private readonly _semanticStyling: SemanticStyling;
private readonly _fetchSemanticTokens: RunOnceScheduler;
private _currentResponse: SemanticColoring | null;
private _currentResponse: SemanticTokensResponse | null;
private _currentRequestCancellationTokenSource: CancellationTokenSource | null;
constructor(model: ITextModel, themeService: IThemeService, stylingProvider: SemanticStyling) {
@ -631,12 +657,12 @@ class ModelSemanticColoring extends Disposable {
this._currentRequestCancellationTokenSource = null;
this._register(this._model.onDidChangeContent(e => this._fetchSemanticTokens.schedule()));
this._register(SemanticColoringProviderRegistry.onDidChange(e => this._fetchSemanticTokens.schedule()));
this._register(SemanticTokensProviderRegistry.onDidChange(e => this._fetchSemanticTokens.schedule()));
if (themeService) {
// workaround for tests which use undefined... :/
this._register(themeService.onThemeChange(_ => {
// clear out existing tokens
this._setSemanticTokens(null, null, []);
this._setSemanticTokens(null, null, null, []);
this._fetchSemanticTokens.schedule();
}));
}
@ -673,90 +699,206 @@ class ModelSemanticColoring extends Disposable {
});
const styling = this._semanticStyling.get(provider);
const request = Promise.resolve(provider.provideSemanticColoring(this._model, this._currentRequestCancellationTokenSource.token));
const lastResultId = this._currentResponse ? this._currentResponse.resultId || null : null;
const request = Promise.resolve(provider.provideSemanticTokens(this._model, lastResultId, null, this._currentRequestCancellationTokenSource.token));
request.then((res) => {
this._currentRequestCancellationTokenSource = null;
contentChangeListener.dispose();
this._setSemanticTokens(res || null, styling, pendingChanges);
this._setSemanticTokens(provider, res || null, styling, pendingChanges);
}, (err) => {
errors.onUnexpectedError(err);
this._currentRequestCancellationTokenSource = null;
contentChangeListener.dispose();
this._setSemanticTokens(null, styling, pendingChanges);
this._setSemanticTokens(provider, null, styling, pendingChanges);
});
}
private _setSemanticTokens(tokens: SemanticColoring | null, styling: SemanticColoringProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
// Type guard: a full SemanticTokens response carries a `data` array,
// whereas a delta (SemanticTokensEdits) response does not.
private static _isSemanticTokens(v: SemanticTokens | SemanticTokensEdits): v is SemanticTokens {
return v && !!((<SemanticTokens>v).data);
}
// Type guard: a delta response carries an `edits` array of splices to
// apply against the previously received token data.
private static _isSemanticTokensEdits(v: SemanticTokens | SemanticTokensEdits): v is SemanticTokensEdits {
return v && Array.isArray((<SemanticTokensEdits>v).edits);
}
// Element-wise copy of `length` uint32s from `src[srcOffset..]` into
// `dest[destOffset..]`; used when splicing SemanticTokensEdits deltas
// into the previous result's data array.
// NOTE(review): `dest.set(src.subarray(srcOffset, srcOffset + length), destOffset)`
// looks equivalent — confirm before replacing the manual loop.
private static _copy(src: Uint32Array, srcOffset: number, dest: Uint32Array, destOffset: number, length: number): void {
for (let i = 0; i < length; i++) {
dest[destOffset + i] = src[srcOffset + i];
}
}
private _setSemanticTokens(provider: SemanticTokensProvider | null, tokens: SemanticTokens | SemanticTokensEdits | null, styling: SemanticColoringProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
const currentResponse = this._currentResponse;
if (this._currentResponse) {
this._currentResponse.dispose();
this._currentResponse = null;
}
if (this._isDisposed) {
// disposed!
if (tokens) {
tokens.dispose();
if (provider && tokens) {
provider.releaseSemanticTokens(tokens.resultId);
}
return;
}
this._currentResponse = tokens;
if (!this._currentResponse || !styling) {
if (!provider || !tokens || !styling) {
this._model.setSemanticTokens(null);
return;
}
const result: MultilineTokens2[] = [];
for (const area of this._currentResponse.areas) {
const srcTokens = area.data;
const tokenCount = srcTokens.length / 5;
let destTokens = new Uint32Array(4 * tokenCount);
let destOffset = 0;
for (let i = 0; i < tokenCount; i++) {
const srcOffset = 5 * i;
const deltaLine = srcTokens[srcOffset];
const startCharacter = srcTokens[srcOffset + 1];
const endCharacter = srcTokens[srcOffset + 2];
const tokenTypeIndex = srcTokens[srcOffset + 3];
const tokenModifierSet = srcTokens[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet);
if (metadata !== Constants.NO_STYLING) {
destTokens[destOffset] = deltaLine;
destTokens[destOffset + 1] = startCharacter;
destTokens[destOffset + 2] = endCharacter;
destTokens[destOffset + 3] = metadata;
destOffset += 4;
if (ModelSemanticColoring._isSemanticTokensEdits(tokens)) {
if (!currentResponse) {
// not possible!
this._model.setSemanticTokens(null);
return;
}
if (tokens.edits.length === 0) {
// nothing to do!
tokens = {
resultId: tokens.resultId,
data: currentResponse.data
};
} else {
let deltaLength = 0;
for (const edit of tokens.edits) {
deltaLength += (edit.data ? edit.data.length : 0) - edit.deleteCount;
}
}
if (destOffset !== destTokens.length) {
destTokens = destTokens.subarray(0, destOffset);
const srcData = currentResponse.data;
const destData = new Uint32Array(srcData.length + deltaLength);
let srcLastStart = srcData.length;
let destLastStart = destData.length;
for (let i = tokens.edits.length - 1; i >= 0; i--) {
const edit = tokens.edits[i];
const copyCount = srcLastStart - (edit.start + edit.deleteCount);
if (copyCount > 0) {
ModelSemanticColoring._copy(srcData, srcLastStart - copyCount, destData, destLastStart - copyCount, copyCount);
destLastStart -= copyCount;
}
if (edit.data) {
ModelSemanticColoring._copy(edit.data, 0, destData, destLastStart - edit.data.length, edit.data.length);
destLastStart -= edit.data.length;
}
srcLastStart = edit.start;
}
if (srcLastStart > 0) {
ModelSemanticColoring._copy(srcData, 0, destData, 0, srcLastStart);
}
tokens = {
resultId: tokens.resultId,
data: destData
};
}
const tokens = new MultilineTokens2(area.line, new SparseEncodedTokens(destTokens));
result.push(tokens);
}
// Adjust incoming semantic tokens
if (pendingChanges.length > 0) {
// More changes occurred while the request was running
// We need to:
// 1. Adjust incoming semantic tokens
// 2. Request them again
for (const change of pendingChanges) {
for (const area of result) {
for (const singleChange of change.changes) {
area.applyEdit(singleChange.range, singleChange.text);
if (ModelSemanticColoring._isSemanticTokens(tokens)) {
this._currentResponse = new SemanticTokensResponse(provider, tokens.resultId, tokens.data);
const srcData = tokens.data;
const tokenCount = (tokens.data.length / 5) | 0;
const tokensPerArea = Math.max(Math.ceil(tokenCount / SemanticColoringConstants.DesiredMaxAreas), SemanticColoringConstants.DesiredTokensPerArea);
const result: MultilineTokens2[] = [];
let tokenIndex = 0;
let lastLineNumber = 1;
let lastStartCharacter = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);
// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {
let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcData[5 * smallTokenEndIndex] === 0) {
smallTokenEndIndex--;
}
if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcData[5 * bigTokenEndIndex] === 0) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}
let destData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 4);
let destOffset = 0;
let areaLine = 0;
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const deltaLine = srcData[srcOffset];
const deltaCharacter = srcData[srcOffset + 1];
const lineNumber = lastLineNumber + deltaLine;
const startCharacter = (deltaLine === 0 ? lastStartCharacter + deltaCharacter : deltaCharacter);
const length = srcData[srcOffset + 2];
const tokenTypeIndex = srcData[srcOffset + 3];
const tokenModifierSet = srcData[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet);
if (metadata !== Constants.NO_STYLING) {
if (areaLine === 0) {
areaLine = lineNumber;
}
destData[destOffset] = lineNumber - areaLine;
destData[destOffset + 1] = startCharacter;
destData[destOffset + 2] = startCharacter + length;
destData[destOffset + 3] = metadata;
destOffset += 4;
}
lastLineNumber = lineNumber;
lastStartCharacter = startCharacter;
tokenIndex++;
}
if (destOffset !== destData.length) {
destData = destData.subarray(0, destOffset);
}
const tokens = new MultilineTokens2(areaLine, new SparseEncodedTokens(destData));
result.push(tokens);
}
this._fetchSemanticTokens.schedule();
// Adjust incoming semantic tokens
if (pendingChanges.length > 0) {
// More changes occurred while the request was running
// We need to:
// 1. Adjust incoming semantic tokens
// 2. Request them again
for (const change of pendingChanges) {
for (const area of result) {
for (const singleChange of change.changes) {
area.applyEdit(singleChange.range, singleChange.text);
}
}
}
this._fetchSemanticTokens.schedule();
}
this._model.setSemanticTokens(result);
return;
}
this._model.setSemanticTokens(result);
this._model.setSemanticTokens(null);
}
private _getSemanticColoringProvider(): SemanticColoringProvider | null {
const result = SemanticColoringProviderRegistry.ordered(this._model);
private _getSemanticColoringProvider(): SemanticTokensProvider | null {
const result = SemanticTokensProviderRegistry.ordered(this._model);
return (result.length > 0 ? result[0] : null);
}
}

31
src/vs/monaco.d.ts vendored
View file

@ -5575,30 +5575,31 @@ declare namespace monaco.languages {
resolveCodeLens?(model: editor.ITextModel, codeLens: CodeLens, token: CancellationToken): ProviderResult<CodeLens>;
}
export interface SemanticColoringLegend {
export interface SemanticTokensLegend {
readonly tokenTypes: string[];
readonly tokenModifiers: string[];
}
export interface SemanticColoringArea {
/**
* The zero-based line value where this token block begins.
*/
readonly line: number;
/**
* The actual token block encoded data.
*/
export interface SemanticTokens {
readonly resultId?: string;
readonly data: Uint32Array;
}
export interface SemanticColoring {
readonly areas: SemanticColoringArea[];
dispose(): void;
export interface SemanticTokensEdit {
readonly start: number;
readonly deleteCount: number;
readonly data?: Uint32Array;
}
export interface SemanticColoringProvider {
getLegend(): SemanticColoringLegend;
provideSemanticColoring(model: editor.ITextModel, token: CancellationToken): ProviderResult<SemanticColoring>;
export interface SemanticTokensEdits {
readonly resultId?: string;
readonly edits: SemanticTokensEdit[];
}
export interface SemanticTokensProvider {
getLegend(): SemanticTokensLegend;
provideSemanticTokens(model: editor.ITextModel, lastResultId: string | null, ranges: Range[] | null, token: CancellationToken): ProviderResult<SemanticTokens | SemanticTokensEdits>;
releaseSemanticTokens(resultId: string | undefined): void;
}
export interface ILanguageExtensionPoint {

View file

@ -68,64 +68,77 @@ declare module 'vscode' {
//#endregion
//#region Alex - semantic coloring
//#region Alex - semantic tokens
export class SemanticColoringLegend {
export class SemanticTokensLegend {
public readonly tokenTypes: string[];
public readonly tokenModifiers: string[];
constructor(tokenTypes: string[], tokenModifiers: string[]);
}
export class SemanticColoringArea {
/**
* The zero-based line value where this token block begins.
*/
public readonly line: number;
/**
* The actual token block encoded data.
* A certain token (at index `i` is encoded using 5 uint32 integers):
* - at index `5*i` - `deltaLine`: token line number, relative to `SemanticColoringArea.line`
* - at index `5*i+1` - `startCharacter`: token start character offset inside the line (inclusive)
* - at index `5*i+2` - `endCharacter`: token end character offset inside the line (exclusive)
* - at index `5*i+3` - `tokenType`: will be looked up in `SemanticColoringLegend.tokenTypes`
* - at index `5*i+4` - `tokenModifiers`: each set bit will be looked up in `SemanticColoringLegend.tokenModifiers`
*/
public readonly data: Uint32Array;
constructor(line: number, data: Uint32Array);
}
export class SemanticColoring {
public readonly areas: SemanticColoringArea[];
constructor(areas: SemanticColoringArea[]);
/**
 * Convenience builder that accumulates tokens pushed with absolute
 * line/character positions and produces the delta-encoded `Uint32Array`
 * expected by `SemanticTokens`.
 */
export class SemanticTokensBuilder {
constructor();
// Adds one token; `tokenType` / `tokenModifiers` are numeric indices
// resolved through the registered SemanticTokensLegend.
push(line: number, char: number, length: number, tokenType: number, tokenModifiers: number): void;
// Returns the encoded token data (5 uint32s per token).
build(): Uint32Array;
}
/**
* The semantic coloring provider interface defines the contract between extensions and
* semantic coloring.
*
*
*/
export interface SemanticColoringProvider {
* A certain token (at index `i`) is encoded using 5 uint32 integers:
* - at index `5*i` - `deltaLine`: token line number, relative to the previous token's line (or to the first line for the first token)
* - at index `5*i+1` - `deltaStart`: token start character offset inside the line (relative to 0 or the previous token if they are on the same line)
* - at index `5*i+2` - `length`: the length of the token
* - at index `5*i+3` - `tokenType`: will be looked up in `SemanticTokensLegend.tokenTypes`
* - at index `5*i+4` - `tokenModifiers`: each set bit will be looked up in `SemanticTokensLegend.tokenModifiers`
*/
export class SemanticTokens {
readonly resultId?: string;
readonly data: Uint32Array;
provideSemanticColoring(document: TextDocument, token: CancellationToken): ProviderResult<SemanticColoring>;
constructor(data: Uint32Array, resultId?: string);
}
export class SemanticTokensEdits {
readonly resultId?: string;
readonly edits: SemanticTokensEdit[];
constructor(edits: SemanticTokensEdit[], resultId?: string);
}
export class SemanticTokensEdit {
readonly start: number;
readonly deleteCount: number;
readonly data?: Uint32Array;
constructor(start: number, deleteCount: number, data?: Uint32Array);
}
export interface SemanticTokensRequestOptions {
readonly ranges?: readonly Range[];
readonly previousResultId?: string;
}
/**
* The semantic tokens provider interface defines the contract between extensions and
* semantic tokens.
*/
export interface SemanticTokensProvider {
provideSemanticTokens(document: TextDocument, options: SemanticTokensRequestOptions, token: CancellationToken): ProviderResult<SemanticTokens | SemanticTokensEdits>;
}
export namespace languages {
/**
* Register a semantic coloring provider.
* Register a semantic tokens provider.
*
* Multiple providers can be registered for a language. In that case providers are sorted
* by their [score](#languages.match) and the best-matching provider is used. Failure
* of the selected provider will cause a failure of the whole operation.
*
* @param selector A selector that defines the documents this provider is applicable to.
* @param provider A semantic coloring provider.
* @param provider A semantic tokens provider.
* @return A [disposable](#Disposable) that unregisters this provider when being disposed.
*/
export function registerSemanticColoringProvider(selector: DocumentSelector, provider: SemanticColoringProvider, legend: SemanticColoringLegend): Disposable;
export function registerSemanticTokensProvider(selector: DocumentSelector, provider: SemanticTokensProvider, legend: SemanticTokensLegend): Disposable;
}
//#endregion

View file

@ -21,7 +21,7 @@ import { Selection } from 'vs/editor/common/core/selection';
import { ExtensionIdentifier } from 'vs/platform/extensions/common/extensions';
import * as callh from 'vs/workbench/contrib/callHierarchy/browser/callHierarchy';
import { mixin } from 'vs/base/common/objects';
import { decodeSemanticTokensDto, ISemanticTokensDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { decodeSemanticTokensDto } from 'vs/workbench/api/common/shared/semanticTokens';
@extHostNamedCustomer(MainContext.MainThreadLanguageFeatures)
export class MainThreadLanguageFeatures implements MainThreadLanguageFeaturesShape {
@ -325,10 +325,10 @@ export class MainThreadLanguageFeatures implements MainThreadLanguageFeaturesSha
}));
}
// --- semantic coloring
// --- semantic tokens
$registerSemanticColoringProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticColoringLegend): void {
this._registrations.set(handle, modes.SemanticColoringProviderRegistry.register(selector, new MainThreadSemanticColoringProvider(this._proxy, handle, legend)));
$registerSemanticTokensProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticTokensLegend): void {
this._registrations.set(handle, modes.SemanticTokensProviderRegistry.register(selector, new MainThreadSemanticTokensProvider(this._proxy, handle, legend)));
}
// --- suggest
@ -602,47 +602,28 @@ export class MainThreadLanguageFeatures implements MainThreadLanguageFeaturesSha
}
class MainThreadSemanticColoringCacheEntry implements modes.SemanticColoring {
constructor(
private readonly _parent: MainThreadSemanticColoringProvider,
public readonly uri: URI,
public readonly id: number,
public readonly areas: modes.SemanticColoringArea[],
) {
}
dispose(): void {
this._parent.release(this);
}
}
export class MainThreadSemanticColoringProvider implements modes.SemanticColoringProvider {
private readonly _cache = new Map<string, MainThreadSemanticColoringCacheEntry>();
export class MainThreadSemanticTokensProvider implements modes.SemanticTokensProvider {
constructor(
private readonly _proxy: ExtHostLanguageFeaturesShape,
private readonly _handle: number,
private readonly _legend: modes.SemanticColoringLegend,
private readonly _legend: modes.SemanticTokensLegend,
) {
}
release(entry: MainThreadSemanticColoringCacheEntry): void {
const currentCacheEntry = this._cache.get(entry.uri.toString()) || null;
if (currentCacheEntry && currentCacheEntry.id === entry.id) {
this._cache.delete(entry.uri.toString());
public releaseSemanticTokens(resultId: string | undefined): void {
if (resultId) {
this._proxy.$releaseSemanticTokens(this._handle, parseInt(resultId, 10));
}
this._proxy.$releaseSemanticColoring(this._handle, entry.id);
}
getLegend(): modes.SemanticColoringLegend {
public getLegend(): modes.SemanticTokensLegend {
return this._legend;
}
async provideSemanticColoring(model: ITextModel, token: CancellationToken): Promise<modes.SemanticColoring | null> {
const lastResult = this._cache.get(model.uri.toString()) || null;
const encodedDto = await this._proxy.$provideSemanticColoring(this._handle, model.uri, lastResult ? lastResult.id : 0, token);
async provideSemanticTokens(model: ITextModel, lastResultId: string | null, ranges: EditorRange[] | null, token: CancellationToken): Promise<modes.SemanticTokens | modes.SemanticTokensEdits | null> {
const nLastResultId = lastResultId ? parseInt(lastResultId, 10) : 0;
const encodedDto = await this._proxy.$provideSemanticTokens(this._handle, model.uri, ranges, nLastResultId, token);
if (!encodedDto) {
return null;
}
@ -650,27 +631,15 @@ export class MainThreadSemanticColoringProvider implements modes.SemanticColorin
return null;
}
const dto = decodeSemanticTokensDto(encodedDto);
const res = this._resolveDeltas(model, lastResult, dto);
this._cache.set(model.uri.toString(), res);
return res;
}
private _resolveDeltas(model: ITextModel, lastResult: MainThreadSemanticColoringCacheEntry | null, dto: ISemanticTokensDto): MainThreadSemanticColoringCacheEntry {
let areas: modes.SemanticColoringArea[] = [];
for (let i = 0, len = dto.areas.length; i < len; i++) {
const areaDto = dto.areas[i];
if (areaDto.type === 'full') {
areas[i] = {
line: areaDto.line,
data: areaDto.data
};
} else {
areas[i] = {
line: areaDto.line,
data: lastResult!.areas[areaDto.oldIndex].data
};
}
if (dto.type === 'full') {
return {
resultId: String(dto.id),
data: dto.data
};
}
return new MainThreadSemanticColoringCacheEntry(this, model.uri, dto.id, areas);
return {
resultId: String(dto.id),
edits: dto.deltas
};
}
}

View file

@ -351,9 +351,9 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
registerOnTypeFormattingEditProvider(selector: vscode.DocumentSelector, provider: vscode.OnTypeFormattingEditProvider, firstTriggerCharacter: string, ...moreTriggerCharacters: string[]): vscode.Disposable {
return extHostLanguageFeatures.registerOnTypeFormattingEditProvider(extension, checkSelector(selector), provider, [firstTriggerCharacter].concat(moreTriggerCharacters));
},
registerSemanticColoringProvider(selector: vscode.DocumentSelector, provider: vscode.SemanticColoringProvider, legend: vscode.SemanticColoringLegend): vscode.Disposable {
registerSemanticTokensProvider(selector: vscode.DocumentSelector, provider: vscode.SemanticTokensProvider, legend: vscode.SemanticTokensLegend): vscode.Disposable {
checkProposedApiEnabled(extension);
return extHostLanguageFeatures.registerSemanticColoringProvider(extension, checkSelector(selector), provider, legend);
return extHostLanguageFeatures.registerSemanticTokensProvider(extension, checkSelector(selector), provider, legend);
},
registerSignatureHelpProvider(selector: vscode.DocumentSelector, provider: vscode.SignatureHelpProvider, firstItem?: string | vscode.SignatureHelpProviderMetadata, ...remaining: string[]): vscode.Disposable {
if (typeof firstItem === 'object') {
@ -893,9 +893,11 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
RelativePattern: extHostTypes.RelativePattern,
ResolvedAuthority: extHostTypes.ResolvedAuthority,
RemoteAuthorityResolverError: extHostTypes.RemoteAuthorityResolverError,
SemanticColoring: extHostTypes.SemanticColoring,
SemanticColoringArea: extHostTypes.SemanticColoringArea,
SemanticColoringLegend: extHostTypes.SemanticColoringLegend,
SemanticTokensLegend: extHostTypes.SemanticTokensLegend,
SemanticTokensBuilder: extHostTypes.SemanticTokensBuilder,
SemanticTokens: extHostTypes.SemanticTokens,
SemanticTokensEdits: extHostTypes.SemanticTokensEdits,
SemanticTokensEdit: extHostTypes.SemanticTokensEdit,
Selection: extHostTypes.Selection,
SelectionRange: extHostTypes.SelectionRange,
ShellExecution: extHostTypes.ShellExecution,

View file

@ -354,7 +354,7 @@ export interface MainThreadLanguageFeaturesShape extends IDisposable {
$registerOnTypeFormattingSupport(handle: number, selector: IDocumentFilterDto[], autoFormatTriggerCharacters: string[], extensionId: ExtensionIdentifier): void;
$registerNavigateTypeSupport(handle: number): void;
$registerRenameSupport(handle: number, selector: IDocumentFilterDto[], supportsResolveInitialValues: boolean): void;
$registerSemanticColoringProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticColoringLegend): void;
$registerSemanticTokensProvider(handle: number, selector: IDocumentFilterDto[], legend: modes.SemanticTokensLegend): void;
$registerSuggestSupport(handle: number, selector: IDocumentFilterDto[], triggerCharacters: string[], supportsResolveDetails: boolean, extensionId: ExtensionIdentifier): void;
$registerSignatureHelpProvider(handle: number, selector: IDocumentFilterDto[], metadata: ISignatureHelpProviderMetadataDto): void;
$registerDocumentLinkProvider(handle: number, selector: IDocumentFilterDto[], supportsResolve: boolean): void;
@ -1167,8 +1167,8 @@ export interface ExtHostLanguageFeaturesShape {
$releaseWorkspaceSymbols(handle: number, id: number): void;
$provideRenameEdits(handle: number, resource: UriComponents, position: IPosition, newName: string, token: CancellationToken): Promise<IWorkspaceEditDto | undefined>;
$resolveRenameLocation(handle: number, resource: UriComponents, position: IPosition, token: CancellationToken): Promise<modes.RenameLocation | undefined>;
$provideSemanticColoring(handle: number, resource: UriComponents, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null>;
$releaseSemanticColoring(handle: number, semanticColoringResultId: number): void;
$provideSemanticTokens(handle: number, resource: UriComponents, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null>;
$releaseSemanticTokens(handle: number, semanticColoringResultId: number): void;
$provideCompletionItems(handle: number, resource: UriComponents, position: IPosition, context: modes.CompletionContext, token: CancellationToken): Promise<ISuggestResultDto | undefined>;
$resolveCompletionItem(handle: number, resource: UriComponents, position: IPosition, id: ChainedCacheId, token: CancellationToken): Promise<ISuggestDataDto | undefined>;
$releaseCompletionItems(handle: number, id: number): void;

View file

@ -7,7 +7,7 @@ import { URI, UriComponents } from 'vs/base/common/uri';
import { mixin } from 'vs/base/common/objects';
import * as vscode from 'vscode';
import * as typeConvert from 'vs/workbench/api/common/extHostTypeConverters';
import { Range, Disposable, CompletionList, SnippetString, CodeActionKind, SymbolInformation, DocumentSymbol, SemanticColoringArea } from 'vs/workbench/api/common/extHostTypes';
import { Range, Disposable, CompletionList, SnippetString, CodeActionKind, SymbolInformation, DocumentSymbol, SemanticTokensEdits } from 'vs/workbench/api/common/extHostTypes';
import { ISingleEditOperation } from 'vs/editor/common/model';
import * as modes from 'vs/editor/common/modes';
import { ExtHostDocuments } from 'vs/workbench/api/common/extHostDocuments';
@ -27,7 +27,7 @@ import { ExtensionIdentifier, IExtensionDescription } from 'vs/platform/extensio
import { IURITransformer } from 'vs/base/common/uriIpc';
import { DisposableStore, dispose } from 'vs/base/common/lifecycle';
import { VSBuffer } from 'vs/base/common/buffer';
import { encodeSemanticTokensDto, ISemanticTokensDto, ISemanticTokensAreaDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { encodeSemanticTokensDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { IdGenerator } from 'vs/base/common/idGenerator';
// --- adapter
@ -616,62 +616,40 @@ class RenameAdapter {
}
}
export const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,
/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
/**
* Threshold for merging multiple delta areas and sending a full area.
*/
MinTokensPerArea = 50
class SemanticTokensPreviousResult {
constructor(
public readonly resultId: string | undefined,
public readonly tokens?: Uint32Array,
) { }
}
interface ISemanticColoringAreaPair {
data: Uint32Array;
dto: ISemanticTokensAreaDto;
}
export class SemanticTokensAdapter {
export class SemanticColoringAdapter {
private readonly _previousResults: Map<number, Uint32Array[]>;
private readonly _splitSingleAreaTokenCountThreshold: number;
private readonly _previousResults: Map<number, SemanticTokensPreviousResult>;
private _nextResultId = 1;
constructor(
private readonly _documents: ExtHostDocuments,
private readonly _provider: vscode.SemanticColoringProvider,
private readonly _desiredTokensPerArea = SemanticColoringConstants.DesiredTokensPerArea,
private readonly _desiredMaxAreas = SemanticColoringConstants.DesiredMaxAreas,
private readonly _minTokensPerArea = SemanticColoringConstants.MinTokensPerArea
private readonly _provider: vscode.SemanticTokensProvider,
) {
this._previousResults = new Map<number, Uint32Array[]>();
this._splitSingleAreaTokenCountThreshold = Math.round(1.5 * this._desiredTokensPerArea);
this._previousResults = new Map<number, SemanticTokensPreviousResult>();
}
provideSemanticColoring(resource: URI, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
provideSemanticTokens(resource: URI, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
const doc = this._documents.getDocument(resource);
return asPromise(() => this._provider.provideSemanticColoring(doc, token)).then(value => {
const previousResult = (previousResultId !== 0 ? this._previousResults.get(previousResultId) : null);
const opts: vscode.SemanticTokensRequestOptions = {
ranges: (Array.isArray(ranges) && ranges.length > 0 ? ranges.map<Range>(typeConvert.Range.to) : undefined),
previousResultId: (previousResult ? previousResult.resultId : undefined)
};
return asPromise(() => this._provider.provideSemanticTokens(doc, opts, token)).then(value => {
if (!value) {
return null;
}
const oldAreas = (previousSemanticColoringResultId !== 0 ? this._previousResults.get(previousSemanticColoringResultId) : null);
if (oldAreas) {
this._previousResults.delete(previousSemanticColoringResultId);
return this._deltaEncodeAreas(oldAreas, value.areas);
if (previousResult) {
this._previousResults.delete(previousResultId);
}
return this._fullEncodeAreas(value.areas);
return this._send(SemanticTokensAdapter._convertToEdits(previousResult, value), value);
});
}
@ -679,298 +657,77 @@ export class SemanticColoringAdapter {
this._previousResults.delete(semanticColoringResultId);
}
private _deltaEncodeAreas(oldAreas: Uint32Array[], newAreas: SemanticColoringArea[]): VSBuffer {
if (newAreas.length > 1) {
// this is a fancy provider which is smart enough to break things into good areas
// we therefore try to match old areas only by object identity
const oldAreasIndexMap = new Map<Uint32Array, number>();
for (let i = 0, len = oldAreas.length; i < len; i++) {
oldAreasIndexMap.set(oldAreas[i], i);
}
let result: ISemanticColoringAreaPair[] = [];
for (let i = 0, len = newAreas.length; i < len; i++) {
const newArea = newAreas[i];
if (oldAreasIndexMap.has(newArea.data)) {
// great! we can reuse this area
const oldIndex = oldAreasIndexMap.get(newArea.data)!;
result.push({
data: newArea.data,
dto: {
type: 'delta',
line: newArea.line,
oldIndex: oldIndex
}
});
} else {
result.push({
data: newArea.data,
dto: {
type: 'full',
line: newArea.line,
data: newArea.data
}
});
}
}
return this._saveResultAndEncode(result);
}
return this._deltaEncodeArea(oldAreas, newAreas[0]);
private static _isSemanticTokens(v: vscode.SemanticTokens | vscode.SemanticTokensEdits): v is vscode.SemanticTokens {
return v && !!((v as vscode.SemanticTokens).data);
}
private static _oldAreaAppearsInNewArea(oldAreaData: Uint32Array, oldAreaTokenCount: number, newAreaData: Uint32Array, newAreaOffset: number): boolean {
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
// check that each and every value from `oldArea` is equal to `area`
for (let j = 0; j < oldAreaTokenCount; j++) {
const oldOffset = 5 * j;
const newOffset = 5 * (j + newAreaOffset);
if (
(oldAreaData[oldOffset] !== newAreaData[newOffset] - newTokenStartDeltaLine)
|| (oldAreaData[oldOffset + 1] !== newAreaData[newOffset + 1])
|| (oldAreaData[oldOffset + 2] !== newAreaData[newOffset + 2])
|| (oldAreaData[oldOffset + 3] !== newAreaData[newOffset + 3])
|| (oldAreaData[oldOffset + 4] !== newAreaData[newOffset + 4])
) {
return false;
}
}
return true;
private static _isSemanticTokensEdits(v: vscode.SemanticTokens | vscode.SemanticTokensEdits): v is vscode.SemanticTokensEdits {
return v && Array.isArray((v as vscode.SemanticTokensEdits).edits);
}
private _deltaEncodeArea(oldAreas: Uint32Array[], newArea: SemanticColoringArea): VSBuffer {
const newAreaData = newArea.data;
const prependAreas: ISemanticColoringAreaPair[] = [];
const appendAreas: ISemanticColoringAreaPair[] = [];
private static _convertToEdits(previousResult: SemanticTokensPreviousResult | null | undefined, newResult: vscode.SemanticTokens | vscode.SemanticTokensEdits): vscode.SemanticTokens | vscode.SemanticTokensEdits {
if (!SemanticTokensAdapter._isSemanticTokens(newResult)) {
return newResult;
}
if (!previousResult || !previousResult.tokens) {
return newResult;
}
const oldData = previousResult.tokens;
const oldLength = oldData.length;
const newData = newResult.data;
const newLength = newData.length;
// Try to find appearences of `oldAreas` inside `area`.
let newTokenStartIndex = 0;
let newTokenEndIndex = (newAreaData.length / 5) | 0;
let oldAreaUsedIndex = -1;
for (let i = 0, len = oldAreas.length; i < len; i++) {
const oldAreaData = oldAreas[i];
const oldAreaTokenCount = (oldAreaData.length / 5) | 0;
if (oldAreaTokenCount === 0) {
// skip old empty areas
continue;
}
if (newTokenEndIndex - newTokenStartIndex < oldAreaTokenCount) {
// there are too many old tokens, this cannot work
break;
}
let commonPrefixLength = 0;
const maxCommonPrefixLength = Math.min(oldLength, newLength);
while (commonPrefixLength < maxCommonPrefixLength && oldData[commonPrefixLength] === newData[commonPrefixLength]) {
commonPrefixLength++;
}
const newAreaOffset = newTokenStartIndex;
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
const isEqual = SemanticColoringAdapter._oldAreaAppearsInNewArea(oldAreaData, oldAreaTokenCount, newAreaData, newAreaOffset);
if (!isEqual) {
break;
}
newTokenStartIndex += oldAreaTokenCount;
if (commonPrefixLength === oldLength && commonPrefixLength === newLength) {
// complete overlap!
return new SemanticTokensEdits([], newResult.resultId);
}
oldAreaUsedIndex = i;
prependAreas.push({
data: oldAreaData,
dto: {
type: 'delta',
line: newArea.line + newTokenStartDeltaLine,
oldIndex: i
}
let commonSuffixLength = 0;
const maxCommonSuffixLength = maxCommonPrefixLength - commonPrefixLength;
while (commonSuffixLength < maxCommonSuffixLength && oldData[oldLength - commonSuffixLength - 1] === newData[newLength - commonSuffixLength - 1]) {
commonSuffixLength++;
}
return new SemanticTokensEdits([{
start: commonPrefixLength,
deleteCount: (oldLength - commonPrefixLength - commonSuffixLength),
data: newData.subarray(commonPrefixLength, newLength - commonSuffixLength)
}], newResult.resultId);
}
private _send(value: vscode.SemanticTokens | vscode.SemanticTokensEdits, original: vscode.SemanticTokens | vscode.SemanticTokensEdits): VSBuffer | null {
if (SemanticTokensAdapter._isSemanticTokens(value)) {
const myId = this._nextResultId++;
this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId, value.data));
return encodeSemanticTokensDto({
id: myId,
type: 'full',
data: value.data
});
}
for (let i = oldAreas.length - 1; i > oldAreaUsedIndex; i--) {
const oldAreaData = oldAreas[i];
const oldAreaTokenCount = (oldAreaData.length / 5) | 0;
if (oldAreaTokenCount === 0) {
// skip old empty areas
continue;
if (SemanticTokensAdapter._isSemanticTokensEdits(value)) {
const myId = this._nextResultId++;
if (SemanticTokensAdapter._isSemanticTokens(original)) {
// store the original
this._previousResults.set(myId, new SemanticTokensPreviousResult(original.resultId, original.data));
} else {
this._previousResults.set(myId, new SemanticTokensPreviousResult(value.resultId));
}
if (newTokenEndIndex - newTokenStartIndex < oldAreaTokenCount) {
// there are too many old tokens, this cannot work
break;
}
const newAreaOffset = (newTokenEndIndex - oldAreaTokenCount);
const newTokenStartDeltaLine = newAreaData[5 * newAreaOffset];
const isEqual = SemanticColoringAdapter._oldAreaAppearsInNewArea(oldAreaData, oldAreaTokenCount, newAreaData, newAreaOffset);
if (!isEqual) {
break;
}
newTokenEndIndex -= oldAreaTokenCount;
appendAreas.unshift({
data: oldAreaData,
dto: {
type: 'delta',
line: newArea.line + newTokenStartDeltaLine,
oldIndex: i
}
return encodeSemanticTokensDto({
id: myId,
type: 'delta',
deltas: (value.edits || []).map(edit => ({ start: edit.start, deleteCount: edit.deleteCount, data: edit.data }))
});
}
if (prependAreas.length === 0 && appendAreas.length === 0) {
// There is no reuse possibility!
return this._fullEncodeAreas([newArea]);
}
if (newTokenStartIndex === newTokenEndIndex) {
// 100% reuse!
return this._saveResultAndEncode(prependAreas.concat(appendAreas));
}
// It is clear at this point that there will be at least one full area.
// Expand the mid area if the areas next to it are too small
while (prependAreas.length > 0) {
const tokenCount = (prependAreas[prependAreas.length - 1].data.length / 5);
if (tokenCount < this._minTokensPerArea) {
newTokenStartIndex -= tokenCount;
prependAreas.pop();
} else {
break;
}
}
while (appendAreas.length > 0) {
const tokenCount = (appendAreas[0].data.length / 5);
if (tokenCount < this._minTokensPerArea) {
newTokenEndIndex += tokenCount;
appendAreas.shift();
} else {
break;
}
}
// Extract the mid area
const newTokenStartDeltaLine = newAreaData[5 * newTokenStartIndex];
const newMidAreaData = new Uint32Array(5 * (newTokenEndIndex - newTokenStartIndex));
for (let tokenIndex = newTokenStartIndex; tokenIndex < newTokenEndIndex; tokenIndex++) {
const srcOffset = 5 * tokenIndex;
const deltaLine = newAreaData[srcOffset];
const startCharacter = newAreaData[srcOffset + 1];
const endCharacter = newAreaData[srcOffset + 2];
const tokenType = newAreaData[srcOffset + 3];
const tokenModifiers = newAreaData[srcOffset + 4];
const destOffset = 5 * (tokenIndex - newTokenStartIndex);
newMidAreaData[destOffset] = deltaLine - newTokenStartDeltaLine;
newMidAreaData[destOffset + 1] = startCharacter;
newMidAreaData[destOffset + 2] = endCharacter;
newMidAreaData[destOffset + 3] = tokenType;
newMidAreaData[destOffset + 4] = tokenModifiers;
}
const newMidArea = new SemanticColoringArea(newArea.line + newTokenStartDeltaLine, newMidAreaData);
const newMidAreas = this._splitAreaIntoMultipleAreasIfNecessary(newMidArea);
const newMidAreasPairs: ISemanticColoringAreaPair[] = newMidAreas.map(a => {
return {
data: a.data,
dto: {
type: 'full',
line: a.line,
data: a.data,
}
};
});
return this._saveResultAndEncode(prependAreas.concat(newMidAreasPairs).concat(appendAreas));
}
private _fullEncodeAreas(areas: SemanticColoringArea[]): VSBuffer {
if (areas.length === 1) {
areas = this._splitAreaIntoMultipleAreasIfNecessary(areas[0]);
}
return this._saveResultAndEncode(areas.map(a => {
return {
data: a.data,
dto: {
type: 'full',
line: a.line,
data: a.data
}
};
}));
}
private _saveResultAndEncode(areas: ISemanticColoringAreaPair[]): VSBuffer {
const myId = this._nextResultId++;
this._previousResults.set(myId, areas.map(a => a.data));
console.log(`_saveResultAndEncode: ${myId} --> ${areas.map(a => `${a.dto.line}-${a.dto.type}(${a.data.length / 5})`).join(', ')}`);
const dto: ISemanticTokensDto = {
id: myId,
areas: areas.map(a => a.dto)
};
return encodeSemanticTokensDto(dto);
}
private _splitAreaIntoMultipleAreasIfNecessary(area: vscode.SemanticColoringArea): SemanticColoringArea[] {
const srcAreaLine = area.line;
const srcAreaData = area.data;
const tokenCount = (srcAreaData.length / 5) | 0;
if (tokenCount <= this._splitSingleAreaTokenCountThreshold) {
return [area];
}
const tokensPerArea = Math.max(Math.ceil(tokenCount / this._desiredMaxAreas), this._desiredTokensPerArea);
let result: SemanticColoringArea[] = [];
let tokenIndex = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);
// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {
let smallAvoidDeltaLine = srcAreaData[5 * tokenEndIndex];
let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcAreaData[5 * (smallTokenEndIndex - 1)] === smallAvoidDeltaLine) {
smallTokenEndIndex--;
}
if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigAvoidDeltaLine = srcAreaData[5 * (tokenEndIndex - 1)];
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcAreaData[5 * (bigTokenEndIndex + 1)] === bigAvoidDeltaLine) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}
let destAreaLine = 0;
const destAreaData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 5);
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const line = srcAreaLine + srcAreaData[srcOffset];
const startCharacter = srcAreaData[srcOffset + 1];
const endCharacter = srcAreaData[srcOffset + 2];
const tokenType = srcAreaData[srcOffset + 3];
const tokenModifiers = srcAreaData[srcOffset + 4];
if (tokenIndex === tokenStartIndex) {
destAreaLine = line;
}
const destOffset = 5 * (tokenIndex - tokenStartIndex);
destAreaData[destOffset] = line - destAreaLine;
destAreaData[destOffset + 1] = startCharacter;
destAreaData[destOffset + 2] = endCharacter;
destAreaData[destOffset + 3] = tokenType;
destAreaData[destOffset + 4] = tokenModifiers;
tokenIndex++;
}
result.push(new SemanticColoringArea(destAreaLine, destAreaData));
}
return result;
return null;
}
}
@ -1481,7 +1238,7 @@ class CallHierarchyAdapter {
type Adapter = DocumentSymbolAdapter | CodeLensAdapter | DefinitionAdapter | HoverAdapter
| DocumentHighlightAdapter | ReferenceAdapter | CodeActionAdapter | DocumentFormattingAdapter
| RangeFormattingAdapter | OnTypeFormattingAdapter | NavigateTypeAdapter | RenameAdapter
| SemanticColoringAdapter | SuggestAdapter | SignatureHelpAdapter | LinkProviderAdapter
| SemanticTokensAdapter | SuggestAdapter | SignatureHelpAdapter | LinkProviderAdapter
| ImplementationAdapter | TypeDefinitionAdapter | ColorProviderAdapter | FoldingProviderAdapter
| DeclarationAdapter | SelectionRangeAdapter | CallHierarchyAdapter;
@ -1809,18 +1566,18 @@ export class ExtHostLanguageFeatures implements extHostProtocol.ExtHostLanguageF
//#region semantic coloring
registerSemanticColoringProvider(extension: IExtensionDescription, selector: vscode.DocumentSelector, provider: vscode.SemanticColoringProvider, legend: vscode.SemanticColoringLegend): vscode.Disposable {
const handle = this._addNewAdapter(new SemanticColoringAdapter(this._documents, provider), extension);
this._proxy.$registerSemanticColoringProvider(handle, this._transformDocumentSelector(selector), legend);
registerSemanticTokensProvider(extension: IExtensionDescription, selector: vscode.DocumentSelector, provider: vscode.SemanticTokensProvider, legend: vscode.SemanticTokensLegend): vscode.Disposable {
const handle = this._addNewAdapter(new SemanticTokensAdapter(this._documents, provider), extension);
this._proxy.$registerSemanticTokensProvider(handle, this._transformDocumentSelector(selector), legend);
return this._createDisposable(handle);
}
$provideSemanticColoring(handle: number, resource: UriComponents, previousSemanticColoringResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
return this._withAdapter(handle, SemanticColoringAdapter, adapter => adapter.provideSemanticColoring(URI.revive(resource), previousSemanticColoringResultId, token), null);
$provideSemanticTokens(handle: number, resource: UriComponents, ranges: IRange[] | null, previousResultId: number, token: CancellationToken): Promise<VSBuffer | null> {
return this._withAdapter(handle, SemanticTokensAdapter, adapter => adapter.provideSemanticTokens(URI.revive(resource), ranges, previousResultId, token), null);
}
$releaseSemanticColoring(handle: number, semanticColoringResultId: number): void {
this._withAdapter(handle, SemanticColoringAdapter, adapter => adapter.releaseSemanticColoring(semanticColoringResultId), undefined);
$releaseSemanticTokens(handle: number, semanticColoringResultId: number): void {
this._withAdapter(handle, SemanticTokensAdapter, adapter => adapter.releaseSemanticColoring(semanticColoringResultId), undefined);
}
//#endregion

View file

@ -2368,7 +2368,7 @@ export enum CommentMode {
//#region Semantic Coloring
export class SemanticColoringLegend {
export class SemanticTokensLegend {
public readonly tokenTypes: string[];
public readonly tokenModifiers: string[];
@ -2378,21 +2378,74 @@ export class SemanticColoringLegend {
}
}
export class SemanticColoringArea {
public readonly line: number;
public readonly data: Uint32Array;
export class SemanticTokensBuilder {
constructor(line: number, data: Uint32Array) {
this.line = line;
private _prevLine: number;
private _prevChar: number;
private _data: number[];
private _dataLen: number;
constructor() {
this._prevLine = 0;
this._prevChar = 0;
this._data = [];
this._dataLen = 0;
}
public push(line: number, char: number, length: number, tokenType: number, tokenModifiers: number): void {
let pushLine = line;
let pushChar = char;
if (this._dataLen > 0) {
pushLine -= this._prevLine;
if (pushLine === 0) {
pushChar -= this._prevChar;
}
}
this._data[this._dataLen++] = pushLine;
this._data[this._dataLen++] = pushChar;
this._data[this._dataLen++] = length;
this._data[this._dataLen++] = tokenType;
this._data[this._dataLen++] = tokenModifiers;
this._prevLine = line;
this._prevChar = char;
}
public build(): Uint32Array {
return new Uint32Array(this._data);
}
}
export class SemanticTokens {
readonly resultId?: string;
readonly data: Uint32Array;
constructor(data: Uint32Array, resultId?: string) {
this.resultId = resultId;
this.data = data;
}
}
export class SemanticColoring {
public readonly areas: SemanticColoringArea[];
export class SemanticTokensEdit {
readonly start: number;
readonly deleteCount: number;
readonly data?: Uint32Array;
constructor(areas: SemanticColoringArea[]) {
this.areas = areas;
constructor(start: number, deleteCount: number, data?: Uint32Array) {
this.start = start;
this.deleteCount = deleteCount;
this.data = data;
}
}
export class SemanticTokensEdits {
readonly resultId?: string;
readonly edits: SemanticTokensEdit[];
constructor(edits: SemanticTokensEdit[], resultId?: string) {
this.resultId = resultId;
this.edits = edits;
}
}

View file

@ -5,47 +5,71 @@
import { VSBuffer } from 'vs/base/common/buffer';
export interface ISemanticTokensFullAreaDto {
export interface IFullSemanticTokensDto {
id: number;
type: 'full';
line: number;
data: Uint32Array;
}
export interface ISemanticTokensDeltaAreaDto {
type: 'delta';
line: number;
oldIndex: number;
}
export type ISemanticTokensAreaDto = ISemanticTokensFullAreaDto | ISemanticTokensDeltaAreaDto;
export interface ISemanticTokensDto {
export interface IDeltaSemanticTokensDto {
id: number;
areas: ISemanticTokensAreaDto[];
type: 'delta';
deltas: { start: number; deleteCount: number; data?: Uint32Array; }[];
}
const enum EncodedSemanticTokensAreaType {
export type ISemanticTokensDto = IFullSemanticTokensDto | IDeltaSemanticTokensDto;
const enum EncodedSemanticTokensType {
Full = 1,
Delta = 2
}
export function encodeSemanticTokensDto(semanticTokens: ISemanticTokensDto): VSBuffer {
const buff = VSBuffer.alloc(encodedSize(semanticTokens));
const buff = VSBuffer.alloc(encodedSize2(semanticTokens));
let offset = 0;
buff.writeUInt32BE(semanticTokens.id, offset); offset += 4;
buff.writeUInt32BE(semanticTokens.areas.length, offset); offset += 4;
for (let i = 0; i < semanticTokens.areas.length; i++) {
offset = encodeArea(semanticTokens.areas[i], buff, offset);
if (semanticTokens.type === 'full') {
buff.writeUInt8(EncodedSemanticTokensType.Full, offset); offset += 1;
buff.writeUInt32BE(semanticTokens.data.length, offset); offset += 4;
for (const uint of semanticTokens.data) {
buff.writeUInt32BE(uint, offset); offset += 4;
}
} else {
buff.writeUInt8(EncodedSemanticTokensType.Delta, offset); offset += 1;
buff.writeUInt32BE(semanticTokens.deltas.length, offset); offset += 4;
for (const delta of semanticTokens.deltas) {
buff.writeUInt32BE(delta.start, offset); offset += 4;
buff.writeUInt32BE(delta.deleteCount, offset); offset += 4;
if (delta.data) {
buff.writeUInt32BE(delta.data.length, offset); offset += 4;
for (const uint of delta.data) {
buff.writeUInt32BE(uint, offset); offset += 4;
}
} else {
buff.writeUInt32BE(0, offset); offset += 4;
}
}
}
return buff;
}
function encodedSize(semanticTokens: ISemanticTokensDto): number {
function encodedSize2(semanticTokens: ISemanticTokensDto): number {
let result = 0;
result += 4; // etag
result += 4; // area count
for (let i = 0; i < semanticTokens.areas.length; i++) {
result += encodedAreaSize(semanticTokens.areas[i]);
result += 4; // id
result += 1; // type
if (semanticTokens.type === 'full') {
result += 4; // data length
result += semanticTokens.data.byteLength;
} else {
result += 4; // delta count
for (const delta of semanticTokens.deltas) {
result += 4; // start
result += 4; // deleteCount
result += 4; // data length
if (delta.data) {
result += delta.data.byteLength;
}
}
}
return result;
}
@ -53,84 +77,37 @@ function encodedSize(semanticTokens: ISemanticTokensDto): number {
export function decodeSemanticTokensDto(buff: VSBuffer): ISemanticTokensDto {
let offset = 0;
const id = buff.readUInt32BE(offset); offset += 4;
const areasCount = buff.readUInt32BE(offset); offset += 4;
let areas: ISemanticTokensAreaDto[] = [];
for (let i = 0; i < areasCount; i++) {
offset = decodeArea(buff, offset, areas);
const type: EncodedSemanticTokensType = buff.readUInt8(offset); offset += 1;
if (type === EncodedSemanticTokensType.Full) {
const length = buff.readUInt32BE(offset); offset += 4;
const data = new Uint32Array(length);
for (let j = 0; j < length; j++) {
data[j] = buff.readUInt32BE(offset); offset += 4;
}
return {
id: id,
type: 'full',
data: data
};
}
const deltaCount = buff.readUInt32BE(offset); offset += 4;
let deltas: { start: number; deleteCount: number; data?: Uint32Array; }[] = [];
for (let i = 0; i < deltaCount; i++) {
const start = buff.readUInt32BE(offset); offset += 4;
const deleteCount = buff.readUInt32BE(offset); offset += 4;
const length = buff.readUInt32BE(offset); offset += 4;
let data: Uint32Array | undefined;
if (length > 0) {
data = new Uint32Array(length);
for (let j = 0; j < length; j++) {
data[j] = buff.readUInt32BE(offset); offset += 4;
}
}
deltas[i] = { start, deleteCount, data };
}
return {
id: id,
areas: areas
type: 'delta',
deltas: deltas
};
}
function encodeArea(area: ISemanticTokensAreaDto, buff: VSBuffer, offset: number): number {
buff.writeUInt8(area.type === 'full' ? EncodedSemanticTokensAreaType.Full : EncodedSemanticTokensAreaType.Delta, offset); offset += 1;
buff.writeUInt32BE(area.line + 1, offset); offset += 4;
if (area.type === 'full') {
const tokens = area.data;
const tokenCount = (tokens.length / 5) | 0;
buff.writeUInt32BE(tokenCount, offset); offset += 4;
// here we are explicitly iterating an writing the ints again to ensure writing the desired endianness.
for (let i = 0; i < tokenCount; i++) {
const tokenOffset = 5 * i;
buff.writeUInt32BE(tokens[tokenOffset], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 1], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 2], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 3], offset); offset += 4;
buff.writeUInt32BE(tokens[tokenOffset + 4], offset); offset += 4;
}
// buff.set(VSBuffer.wrap(uint8), offset); offset += area.data.byteLength;
} else {
buff.writeUInt32BE(area.oldIndex, offset); offset += 4;
}
return offset;
}
function encodedAreaSize(area: ISemanticTokensAreaDto): number {
let result = 0;
result += 1; // type
result += 4; // line
if (area.type === 'full') {
const tokens = area.data;
const tokenCount = (tokens.length / 5) | 0;
result += 4; // token count
result += tokenCount * 5 * 4;
return result;
} else {
result += 4; // old index
return result;
}
}
function decodeArea(buff: VSBuffer, offset: number, areas: ISemanticTokensAreaDto[]): number {
const type: EncodedSemanticTokensAreaType = buff.readUInt8(offset); offset += 1;
const line = buff.readUInt32BE(offset); offset += 4;
if (type === EncodedSemanticTokensAreaType.Full) {
// here we are explicitly iterating and reading the ints again to ensure reading the desired endianness.
const tokenCount = buff.readUInt32BE(offset); offset += 4;
const data = new Uint32Array(5 * tokenCount);
for (let i = 0; i < tokenCount; i++) {
const destOffset = 5 * i;
data[destOffset] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 1] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 2] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 3] = buff.readUInt32BE(offset); offset += 4;
data[destOffset + 4] = buff.readUInt32BE(offset); offset += 4;
}
areas.push({
type: 'full',
line: line,
data: data
});
return offset;
} else {
const oldIndex = buff.readUInt32BE(offset); offset += 4;
areas.push({
type: 'delta',
line: line,
oldIndex: oldIndex
});
return offset;
}
}

View file

@ -1,343 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { URI } from 'vs/base/common/uri';
import * as types from 'vs/workbench/api/common/extHostTypes';
import { TestRPCProtocol } from 'vs/workbench/test/electron-browser/api/testRPCProtocol';
import { SemanticColoringAdapter, SemanticColoringConstants } from 'vs/workbench/api/common/extHostLanguageFeatures';
import { ExtHostDocuments } from 'vs/workbench/api/common/extHostDocuments';
import { ExtHostDocumentsAndEditors } from 'vs/workbench/api/common/extHostDocumentsAndEditors';
import { ExtHostContext } from 'vs/workbench/api/common/extHost.protocol';
import * as vscode from 'vscode';
import { CancellationToken } from 'vs/base/common/cancellation';
import { decodeSemanticTokensDto, ISemanticTokensDto } from 'vs/workbench/api/common/shared/semanticTokens';
import { ExtHostDocumentData } from 'vs/workbench/api/common/extHostDocumentData';
suite('SemanticColoringAdapter', () => {
const resource = URI.parse('foo:bar');
const rpcProtocol = new TestRPCProtocol();
const initialText = [
'const enum E01 {}',
'const enum E02 {}',
'const enum E03 {}',
'const enum E04 {}',
'const enum E05 {}',
'const enum E06 {}',
'const enum E07 {}',
'const enum E08 {}',
'const enum E09 {}',
'const enum E10 {}',
'const enum E11 {}',
'const enum E12 {}',
'const enum E13 {}',
'const enum E14 {}',
'const enum E15 {}',
'const enum E16 {}',
'const enum E17 {}',
'const enum E18 {}',
'const enum E19 {}',
'const enum E20 {}',
'const enum E21 {}',
'const enum E22 {}',
'const enum E23 {}',
].join('\n');
const extHostDocumentsAndEditors = new ExtHostDocumentsAndEditors(rpcProtocol);
extHostDocumentsAndEditors.$acceptDocumentsAndEditorsDelta({
addedDocuments: [{
isDirty: false,
versionId: 1,
modeId: 'javascript',
uri: resource,
lines: initialText.split(/\n/),
EOL: '\n',
}]
});
const extHostDocuments = new ExtHostDocuments(rpcProtocol, extHostDocumentsAndEditors);
rpcProtocol.set(ExtHostContext.ExtHostDocuments, extHostDocuments);
const semanticTokensProvider = new class implements vscode.SemanticColoringProvider {
provideSemanticColoring(document: vscode.TextDocument, token: vscode.CancellationToken): types.SemanticColoring {
const lines = document.getText().split(/\r\n|\r|\n/g);
const tokens: number[] = [];
const pushToken = (line: number, startCharacter: number, endCharacter: number, type: number) => {
tokens.push(line);
tokens.push(startCharacter);
tokens.push(endCharacter);
tokens.push(type);
tokens.push(0);
};
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
const m = line.match(/^(const enum )([\w\d]+) \{\}/);
if (m) {
pushToken(i, m[1].length, m[1].length + m[2].length, parseInt(m[2].substr(1)));
}
}
return new types.SemanticColoring([new types.SemanticColoringArea(0, new Uint32Array(tokens))]);
}
};
let adapter: SemanticColoringAdapter;
let doc: ExtHostDocumentData;
setup(() => {
adapter = new SemanticColoringAdapter(extHostDocuments, semanticTokensProvider, 10, SemanticColoringConstants.DesiredMaxAreas, 5);
doc = extHostDocumentsAndEditors.getDocument(resource)!;
const docLineCount = doc.document.lineCount;
const allRange = { startLineNumber: 1, startColumn: 1, endLineNumber: docLineCount, endColumn: doc.document.lineAt(docLineCount - 1).text.length + 1 };
doc.onEvents({
versionId: 1,
eol: '\n',
changes: [{
range: allRange,
rangeOffset: 0,
rangeLength: 0,
text: initialText
}]
});
});
type SimpleTokensDto = { type: 'full'; line: number; tokens: number[]; } | { type: 'delta'; line: number; oldIndex: number };
function assertDTO(actual: ISemanticTokensDto, expected: SimpleTokensDto[]): void {
const simpleActual: SimpleTokensDto[] = actual.areas.map((area) => {
if (area.type === 'full') {
const tokenCount = (area.data.length / 5) | 0;
let tokens: number[] = [];
for (let i = 0; i < tokenCount; i++) {
tokens.push(area.data[5 * i]);
}
return {
type: 'full',
line: area.line,
tokens: tokens
};
}
return {
type: 'delta',
line: area.line,
oldIndex: area.oldIndex
};
});
assert.deepEqual(simpleActual, expected);
}
test('single area - breaks it up', async () => {
	// The provider returns one big area; the adapter is expected to split it
	// into blocks of at most 10 tokens each.
	const encoded = (await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!;
	const decoded = decodeSemanticTokensDto(encoded);
	assertDTO(decoded, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
});
test('single area - after a not important change', async () => {
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
	// An edit that leaves every token block intact: all areas should come
	// back as deltas referencing the previous result.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 2, startColumn: 18, endLineNumber: 2, endColumn: 18 },
			rangeOffset: 0,
			rangeLength: 0,
			text: '//'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'delta', line: 1, oldIndex: 0 },
		{ type: 'delta', line: 11, oldIndex: 1 },
		{ type: 'delta', line: 21, oldIndex: 2 },
	]);
	adapter.releaseSemanticColoring(before.id);
});
test('single area - after a single removal in the first block', async () => {
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
	// Comment out line 2: the first block loses one token and must be resent
	// in full; the untouched blocks come back as deltas.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 2, startColumn: 1, endLineNumber: 2, endColumn: 1 },
			rangeOffset: 0,
			rangeLength: 0,
			text: '//'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'full', line: 1, tokens: [0, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'delta', line: 11, oldIndex: 1 },
		{ type: 'delta', line: 21, oldIndex: 2 },
	]);
	adapter.releaseSemanticColoring(before.id);
});
// NOTE(review): this test has the same title AND a byte-identical body as the
// 'single area - after a not important change' test earlier in this suite —
// it looks like a copy/paste leftover that was meant to cover a different
// scenario. Confirm intent and either re-purpose, rename, or remove it.
test('single area - after a not important change', async () => {
const result1 = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
assertDTO(result1, [
{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
{ type: 'full', line: 21, tokens: [0, 1, 2] },
]);
doc.onEvents({
versionId: 2,
eol: '\n',
changes: [{
range: { startLineNumber: 2, startColumn: 18, endLineNumber: 2, endColumn: 18 },
rangeOffset: 0,
rangeLength: 0,
text: '//'
}]
});
const result2 = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, result1.id, CancellationToken.None))!);
assertDTO(result2, [
{ type: 'delta', line: 1, oldIndex: 0 },
{ type: 'delta', line: 11, oldIndex: 1 },
{ type: 'delta', line: 21, oldIndex: 2 },
]);
adapter.releaseSemanticColoring(result1.id);
});
test('single area - after a down shift of all the blocks', async () => {
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
	// Inserting a newline at the very top shifts every block down by one
	// line; all areas are deltas with an adjusted starting line.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 1, startColumn: 1, endLineNumber: 1, endColumn: 1 },
			rangeOffset: 0,
			rangeLength: 0,
			text: '\n'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'delta', line: 2, oldIndex: 0 },
		{ type: 'delta', line: 12, oldIndex: 1 },
		{ type: 'delta', line: 22, oldIndex: 2 },
	]);
	adapter.releaseSemanticColoring(before.id);
});
test('single area - after a single removal in the last block', async () => {
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
	// Comment out line 22: only the last block changes and is resent in
	// full; the first two blocks come back as deltas.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 22, startColumn: 1, endLineNumber: 22, endColumn: 1 },
			rangeOffset: 0,
			rangeLength: 0,
			text: '//'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'delta', line: 1, oldIndex: 0 },
		{ type: 'delta', line: 11, oldIndex: 1 },
		{ type: 'full', line: 21, tokens: [0, 2] },
	]);
	adapter.releaseSemanticColoring(before.id);
});
test('single area - after a single addition in the first block', async () => {
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 11, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] },
		{ type: 'full', line: 21, tokens: [0, 1, 2] },
	]);
	// Insert a new enum line before line 2: the first block gains a token
	// and is resent in full; the later blocks become shifted deltas.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 2, startColumn: 1, endLineNumber: 2, endColumn: 1 },
			rangeOffset: 0,
			rangeLength: 0,
			text: 'const enum E00 {}\n'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'full', line: 1, tokens: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] },
		{ type: 'delta', line: 12, oldIndex: 1 },
		{ type: 'delta', line: 22, oldIndex: 2 },
	]);
	adapter.releaseSemanticColoring(before.id);
});
test('going from empty to 1 semantic token', async () => {
	// Wipe the whole fixture so the document produces no tokens at all.
	doc.onEvents({
		versionId: 2,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 1, startColumn: 1, endLineNumber: 23, endColumn: 18 },
			rangeOffset: 0,
			rangeLength: 0,
			text: ''
		}]
	});
	const before = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, 0, CancellationToken.None))!);
	assertDTO(before, [
		{ type: 'full', line: 1, tokens: [] },
	]);
	// Add a single enum line: the sole area must be resent in full with
	// exactly one token.
	doc.onEvents({
		versionId: 3,
		eol: '\n',
		changes: [{
			range: { startLineNumber: 1, startColumn: 1, endLineNumber: 1, endColumn: 1 },
			rangeOffset: 0,
			rangeLength: 0,
			text: 'const enum E01 {}\n'
		}]
	});
	const after = decodeSemanticTokensDto((await adapter.provideSemanticColoring(resource, before.id, CancellationToken.None))!);
	assertDTO(after, [
		{ type: 'full', line: 1, tokens: [0] }
	]);
	adapter.releaseSemanticColoring(before.id);
});
});