Implements worker-based TextMate tokenization. Adds the setting "editor.experimental.asyncTokenization" (default: false).

This commit is contained in:
Henning Dieterichs 2023-02-15 10:07:53 +01:00
parent d491e2e750
commit 3c3902cc5e
No known key found for this signature in database
GPG key ID: 771381EFFDB9EC06
25 changed files with 1304 additions and 1012 deletions

View file

@ -95,6 +95,11 @@ const editorConfiguration: IConfigurationNode = {
default: 20_000,
description: nls.localize('maxTokenizationLineLength', "Lines above this length will not be tokenized for performance reasons")
},
'editor.experimental.asyncTokenization': {
type: 'boolean',
default: false,
description: nls.localize('editor.experimental.asyncTokenization', "Controls whether the tokenization should happen asynchronously on a web worker.")
},
'editor.language.brackets': {
type: ['array', 'null'],
default: null, // We want to distinguish the empty array from not configured.

View file

@ -30,7 +30,7 @@ const enum Constants {
* An array that avoids being sparse by always
* filling up unused indices with a default value.
*/
class ContiguousGrowingArray<T> {
export class ContiguousGrowingArray<T> {
private _store: T[] = [];

View file

@ -21,7 +21,7 @@ import { FontStyle, ColorId, StandardTokenType, TokenMetadata } from 'vs/editor/
import { ILanguageService } from 'vs/editor/common/languages/language';
import { INotificationService } from 'vs/platform/notification/common/notification';
import { findMatchingThemeRule } from 'vs/workbench/services/textMate/common/TMHelper';
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import type { IGrammar, IToken, StateStack } from 'vscode-textmate';
import { IWorkbenchThemeService } from 'vs/workbench/services/themes/common/workbenchThemeService';
import { CancellationTokenSource } from 'vs/base/common/cancellation';
@ -43,7 +43,7 @@ class InspectEditorTokensController extends Disposable implements IEditorContrib
}
private _editor: ICodeEditor;
private _textMateService: ITextMateService;
private _textMateService: ITextMateTokenizationFeature;
private _themeService: IWorkbenchThemeService;
private _languageService: ILanguageService;
private _notificationService: INotificationService;
@ -53,7 +53,7 @@ class InspectEditorTokensController extends Disposable implements IEditorContrib
constructor(
editor: ICodeEditor,
@ITextMateService textMateService: ITextMateService,
@ITextMateTokenizationFeature textMateService: ITextMateTokenizationFeature,
@ILanguageService languageService: ILanguageService,
@IWorkbenchThemeService themeService: IWorkbenchThemeService,
@INotificationService notificationService: INotificationService,
@ -187,7 +187,7 @@ class InspectEditorTokensWidget extends Disposable implements IContentWidget {
private readonly _editor: IActiveCodeEditor;
private readonly _languageService: ILanguageService;
private readonly _themeService: IWorkbenchThemeService;
private readonly _textMateService: ITextMateService;
private readonly _textMateService: ITextMateTokenizationFeature;
private readonly _notificationService: INotificationService;
private readonly _configurationService: IConfigurationService;
private readonly _languageFeaturesService: ILanguageFeaturesService;
@ -197,7 +197,7 @@ class InspectEditorTokensWidget extends Disposable implements IContentWidget {
constructor(
editor: IActiveCodeEditor,
textMateService: ITextMateService,
textMateService: ITextMateTokenizationFeature,
languageService: ILanguageService,
themeService: IWorkbenchThemeService,
notificationService: INotificationService,

View file

@ -14,7 +14,7 @@ import { ILanguageService } from 'vs/editor/common/languages/language';
import { Extensions, IJSONContributionRegistry } from 'vs/platform/jsonschemas/common/jsonContributionRegistry';
import { Registry } from 'vs/platform/registry/common/platform';
import { IExtensionService } from 'vs/workbench/services/extensions/common/extensions';
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import { getParseErrorMessage } from 'vs/base/common/jsonErrorMessages';
import { IExtensionResourceLoaderService } from 'vs/platform/extensionResourceLoader/common/extensionResourceLoader';
import { hash } from 'vs/base/common/hash';
@ -95,7 +95,7 @@ export class LanguageConfigurationFileHandler extends Disposable {
private readonly _done = new Map<string, number>();
constructor(
@ITextMateService textMateService: ITextMateService,
@ITextMateTokenizationFeature textMateService: ITextMateTokenizationFeature,
@ILanguageService private readonly _languageService: ILanguageService,
@IExtensionResourceLoaderService private readonly _extensionResourceLoaderService: IExtensionResourceLoaderService,
@IExtensionService private readonly _extensionService: IExtensionService,

View file

@ -7,7 +7,7 @@ import * as nls from 'vs/nls';
import { Range } from 'vs/editor/common/core/range';
import { Action2, registerAction2 } from 'vs/platform/actions/common/actions';
import { Categories } from 'vs/platform/action/common/actionCommonCategories';
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import { IModelService } from 'vs/editor/common/services/model';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { URI } from 'vs/base/common/uri';
@ -52,7 +52,7 @@ class StartDebugTextMate extends Action2 {
}
async run(accessor: ServicesAccessor) {
const textMateService = accessor.get(ITextMateService);
const textMateService = accessor.get(ITextMateTokenizationFeature);
const modelService = accessor.get(IModelService);
const editorService = accessor.get(IEditorService);
const codeEditorService = accessor.get(ICodeEditorService);

View file

@ -10,7 +10,7 @@ import { IInstantiationService, ServicesAccessor } from 'vs/platform/instantiati
import { IWorkbenchThemeService, IWorkbenchColorTheme } from 'vs/workbench/services/themes/common/workbenchThemeService';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { EditorResourceAccessor } from 'vs/workbench/common/editor';
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import type { IGrammar, StateStack } from 'vscode-textmate';
import { TokenizationRegistry } from 'vs/editor/common/languages';
import { TokenMetadata } from 'vs/editor/common/encodedTokenAttributes';
@ -92,7 +92,7 @@ class Snapper {
constructor(
@ILanguageService private readonly languageService: ILanguageService,
@IWorkbenchThemeService private readonly themeService: IWorkbenchThemeService,
@ITextMateService private readonly textMateService: ITextMateService
@ITextMateTokenizationFeature private readonly textMateService: ITextMateTokenizationFeature
) {
}

View file

@ -1,460 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as nls from 'vs/nls';
import * as dom from 'vs/base/browser/dom';
import { Color } from 'vs/base/common/color';
import { onUnexpectedError } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
import * as resources from 'vs/base/common/resources';
import * as types from 'vs/base/common/types';
import { equals as equalArray } from 'vs/base/common/arrays';
import { URI } from 'vs/base/common/uri';
import { IState, ITokenizationSupport, TokenizationRegistry, ITokenizationSupportFactory, TokenizationResult, EncodedTokenizationResult } from 'vs/editor/common/languages';
import { LanguageId, StandardTokenType } from 'vs/editor/common/encodedTokenAttributes';
import { nullTokenizeEncoded } from 'vs/editor/common/languages/nullTokenize';
import { generateTokensCSSForColorMap } from 'vs/editor/common/languages/supports/tokenization';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { ILogService } from 'vs/platform/log/common/log';
import { INotificationService } from 'vs/platform/notification/common/notification';
import { ExtensionMessageCollector } from 'vs/workbench/services/extensions/common/extensionsRegistry';
import { ITMSyntaxExtensionPoint, grammarsExtPoint } from 'vs/workbench/services/textMate/common/TMGrammars';
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { ITextMateThemingRule, IWorkbenchThemeService, IWorkbenchColorTheme } from 'vs/workbench/services/themes/common/workbenchThemeService';
import type { IGrammar, StateStack, IOnigLib, IRawTheme } from 'vscode-textmate';
import { Disposable, IDisposable, dispose } from 'vs/base/common/lifecycle';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IValidGrammarDefinition, IValidEmbeddedLanguagesMap, IValidTokenTypeMap } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { missingTMGrammarErrorMessage, TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IExtensionResourceLoaderService } from 'vs/platform/extensionResourceLoader/common/extensionResourceLoader';
import { IProgressService, ProgressLocation } from 'vs/platform/progress/common/progress';
import { TMTokenization } from 'vs/workbench/services/textMate/common/TMTokenization';
/**
 * Shared base implementation of {@link ITextMateService}.
 *
 * Responsibilities visible here:
 * - Consumes the `grammars` extension point and collects validated grammar
 *   definitions (`_grammarDefinitions`).
 * - Lazily creates a {@link TMGrammarFactory} backed by `vscode-textmate` and
 *   the `vscode-oniguruma` WASM regex engine.
 * - Registers a tokenization-support factory per contributed language and
 *   re-applies the current color theme as CSS token rules.
 *
 * Subclasses only supply the oniguruma WASM bytes via
 * {@link _loadVSCodeOnigurumWASM}.
 */
export abstract class AbstractTextMateService extends Disposable implements ITextMateService {
	public _serviceBrand: undefined;

	private readonly _onDidEncounterLanguage: Emitter<string> = this._register(new Emitter<string>());
	public readonly onDidEncounterLanguage: Event<string> = this._onDidEncounterLanguage.event;

	// Style sheet that holds the CSS rules generated from the token color map.
	private readonly _styleElement: HTMLStyleElement;
	// Language ids for which tokenization was requested; replayed when the
	// extension point handler re-runs so their tokenizers are recreated.
	private readonly _createdModes: string[];
	// Indexed by encoded LanguageId; true once onDidEncounterLanguage fired for it.
	private readonly _encounteredLanguages: boolean[];

	private _debugMode: boolean;
	private _debugModePrintFunc: (str: string) => void;

	// null until the `grammars` extension point handler has run at least once.
	private _grammarDefinitions: IValidGrammarDefinition[] | null;
	private _grammarFactory: TMGrammarFactory | null;
	private _tokenizersRegistrations: IDisposable[];
	protected _currentTheme: IRawTheme | null;
	protected _currentTokenColorMap: string[] | null;

	constructor(
		@ILanguageService protected readonly _languageService: ILanguageService,
		@IWorkbenchThemeService private readonly _themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService protected readonly _extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService private readonly _notificationService: INotificationService,
		@ILogService private readonly _logService: ILogService,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
		@IProgressService private readonly _progressService: IProgressService
	) {
		super();
		this._styleElement = dom.createStyleSheet();
		this._styleElement.className = 'vscode-tokens-styles';
		this._createdModes = [];
		this._encounteredLanguages = [];
		this._debugMode = false;
		this._debugModePrintFunc = () => { };
		this._grammarDefinitions = null;
		this._grammarFactory = null;
		this._tokenizersRegistrations = [];
		this._currentTheme = null;
		this._currentTokenColorMap = null;
		// The handler runs whenever the set of contributed grammars changes:
		// tear down the old factory/registrations, then rebuild definitions.
		grammarsExtPoint.setHandler((extensions) => {
			this._grammarDefinitions = null;
			if (this._grammarFactory) {
				this._grammarFactory.dispose();
				this._grammarFactory = null;
				this._onDidDisposeGrammarFactory();
			}
			this._tokenizersRegistrations = dispose(this._tokenizersRegistrations);
			this._grammarDefinitions = [];
			for (const extension of extensions) {
				const grammars = extension.value;
				for (const grammar of grammars) {
					if (!this._validateGrammarExtensionPoint(extension.description.extensionLocation, grammar, extension.collector)) {
						continue;
					}
					const grammarLocation = resources.joinPath(extension.description.extensionLocation, grammar.path);
					// Map scope name -> encoded language id, keeping only
					// languages that are actually registered.
					const embeddedLanguages: IValidEmbeddedLanguagesMap = Object.create(null);
					if (grammar.embeddedLanguages) {
						const scopes = Object.keys(grammar.embeddedLanguages);
						for (let i = 0, len = scopes.length; i < len; i++) {
							const scope = scopes[i];
							const language = grammar.embeddedLanguages[scope];
							if (typeof language !== 'string') {
								// never hurts to be too careful
								continue;
							}
							if (this._languageService.isRegisteredLanguageId(language)) {
								embeddedLanguages[scope] = this._languageService.languageIdCodec.encodeLanguageId(language);
							}
						}
					}
					// Map scope name -> StandardTokenType; unknown strings are dropped.
					const tokenTypes: IValidTokenTypeMap = Object.create(null);
					if (grammar.tokenTypes) {
						const scopes = Object.keys(grammar.tokenTypes);
						for (const scope of scopes) {
							const tokenType = grammar.tokenTypes[scope];
							switch (tokenType) {
								case 'string':
									tokenTypes[scope] = StandardTokenType.String;
									break;
								case 'other':
									tokenTypes[scope] = StandardTokenType.Other;
									break;
								case 'comment':
									tokenTypes[scope] = StandardTokenType.Comment;
									break;
							}
						}
					}
					let validLanguageId: string | null = null;
					if (grammar.language && this._languageService.isRegisteredLanguageId(grammar.language)) {
						validLanguageId = grammar.language;
					}
					// Coerce an untrusted extension-point value into a string
					// array, falling back to `defaultValue` on any mismatch.
					function asStringArray(array: unknown, defaultValue: string[]): string[] {
						if (!Array.isArray(array)) {
							return defaultValue;
						}
						if (!array.every(e => typeof e === 'string')) {
							return defaultValue;
						}
						return array;
					}
					this._grammarDefinitions.push({
						location: grammarLocation,
						language: validLanguageId ? validLanguageId : undefined,
						scopeName: grammar.scopeName,
						embeddedLanguages: embeddedLanguages,
						tokenTypes: tokenTypes,
						injectTo: grammar.injectTo,
						balancedBracketSelectors: asStringArray(grammar.balancedBracketScopes, ['*']),
						unbalancedBracketSelectors: asStringArray(grammar.unbalancedBracketScopes, []),
					});
					if (validLanguageId) {
						this._tokenizersRegistrations.push(TokenizationRegistry.registerFactory(validLanguageId, this._createFactory(validLanguageId)));
					}
				}
			}
			// Re-request tokenizers for languages that were already in use
			// before the registrations were disposed above.
			for (const createMode of this._createdModes) {
				TokenizationRegistry.getOrCreate(createMode);
			}
		});
		this._updateTheme(this._grammarFactory, this._themeService.getColorTheme(), true);
		this._register(this._themeService.onDidColorThemeChange(() => {
			this._updateTheme(this._grammarFactory, this._themeService.getColorTheme(), false);
		}));
		this._languageService.onDidEncounterLanguage((languageId) => {
			this._createdModes.push(languageId);
		});
	}

	/**
	 * Turns on oniguruma debug call logging, routing output through `printFn`.
	 * Shows a progress notification with a "Stop" button; `onStop` is invoked
	 * after logging has been switched off again.
	 */
	public startDebugMode(printFn: (str: string) => void, onStop: () => void): void {
		if (this._debugMode) {
			this._notificationService.error(nls.localize('alreadyDebugging', "Already Logging."));
			return;
		}
		this._debugModePrintFunc = printFn;
		this._debugMode = true;
		if (this._debugMode) {
			this._progressService.withProgress(
				{
					location: ProgressLocation.Notification,
					buttons: [nls.localize('stop', "Stop")]
				},
				(progress) => {
					progress.report({
						message: nls.localize('progress1', "Preparing to log TM Grammar parsing. Press Stop when finished.")
					});
					return this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						vscodeOniguruma.setDefaultDebugCall(true);
						progress.report({
							message: nls.localize('progress2', "Now logging TM Grammar parsing. Press Stop when finished.")
						});
						// Intentionally never resolves: keeps the progress
						// notification visible until the button callback runs.
						return new Promise<void>((resolve, reject) => { });
					});
				},
				(choice) => {
					// "Stop" pressed: disable debug logging and notify the caller.
					this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						this._debugModePrintFunc = () => { };
						this._debugMode = false;
						vscodeOniguruma.setDefaultDebugCall(false);
						onStop();
					});
				}
			);
		}
	}

	private _canCreateGrammarFactory(): boolean {
		// Check if extension point is ready
		return (this._grammarDefinitions ? true : false);
	}

	/**
	 * Lazily creates the grammar factory (loading vscode-textmate and the
	 * oniguruma WASM on first use) and applies the current theme to it.
	 */
	private async _getOrCreateGrammarFactory(): Promise<TMGrammarFactory> {
		if (this._grammarFactory) {
			return this._grammarFactory;
		}
		const [vscodeTextmate, vscodeOniguruma] = await Promise.all([import('vscode-textmate'), this._getVSCodeOniguruma()]);
		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources: string[]) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str: string) => vscodeOniguruma.createOnigString(str)
		});
		// Avoid duplicate instantiations
		if (this._grammarFactory) {
			return this._grammarFactory;
		}
		this._grammarFactory = new TMGrammarFactory({
			logTrace: (msg: string) => this._logService.trace(msg),
			logError: (msg: string, err: any) => this._logService.error(msg, err),
			readFile: (resource: URI) => this._extensionResourceLoaderService.readExtensionResource(resource)
		}, this._grammarDefinitions || [], vscodeTextmate, onigLib);
		this._onDidCreateGrammarFactory(this._grammarDefinitions || []);
		this._updateTheme(this._grammarFactory, this._themeService.getColorTheme(), true);
		return this._grammarFactory;
	}

	/**
	 * Builds the TokenizationRegistry factory for `languageId`. Returns null
	 * support when the language/grammar is unavailable; errors other than a
	 * missing grammar are reported via onUnexpectedError.
	 */
	private _createFactory(languageId: string): ITokenizationSupportFactory {
		return {
			createTokenizationSupport: async (): Promise<ITokenizationSupport | null> => {
				if (!this._languageService.isRegisteredLanguageId(languageId)) {
					return null;
				}
				if (!this._canCreateGrammarFactory()) {
					return null;
				}
				const encodedLanguageId = this._languageService.languageIdCodec.encodeLanguageId(languageId);
				try {
					const grammarFactory = await this._getOrCreateGrammarFactory();
					if (!grammarFactory.has(languageId)) {
						return null;
					}
					const r = await grammarFactory.createGrammar(languageId, encodedLanguageId);
					if (!r.grammar) {
						return null;
					}
					const tokenization = new TMTokenization(r.grammar, r.initialState, r.containsEmbeddedLanguages);
					// Fire onDidEncounterLanguage only once per encoded language id.
					tokenization.onDidEncounterLanguage((encodedLanguageId) => {
						if (!this._encounteredLanguages[encodedLanguageId]) {
							const languageId = this._languageService.languageIdCodec.decodeLanguageId(encodedLanguageId);
							this._encounteredLanguages[encodedLanguageId] = true;
							this._onDidEncounterLanguage.fire(languageId);
						}
					});
					return new TMTokenizationSupportWithLineLimit(languageId, encodedLanguageId, tokenization, this._configurationService);
				} catch (err) {
					if (err.message && err.message === missingTMGrammarErrorMessage) {
						// Don't log this error message
						return null;
					}
					onUnexpectedError(err);
					return null;
				}
			}
		};
	}

	// Color ids are 1-based; index 0 is intentionally left unused.
	private static _toColorMap(colorMap: string[]): Color[] {
		const result: Color[] = [null!];
		for (let i = 1, len = colorMap.length; i < len; i++) {
			result[i] = Color.fromHex(colorMap[i]);
		}
		return result;
	}

	/**
	 * Applies `colorTheme` unless it is identical (rules + color map) to the
	 * currently applied one; `forceUpdate` bypasses that equality check.
	 */
	private _updateTheme(grammarFactory: TMGrammarFactory | null, colorTheme: IWorkbenchColorTheme, forceUpdate: boolean): void {
		if (!forceUpdate && this._currentTheme && this._currentTokenColorMap && AbstractTextMateService.equalsTokenRules(this._currentTheme.settings, colorTheme.tokenColors) && equalArray(this._currentTokenColorMap, colorTheme.tokenColorMap)) {
			return;
		}
		this._currentTheme = { name: colorTheme.label, settings: colorTheme.tokenColors };
		this._currentTokenColorMap = colorTheme.tokenColorMap;
		this._doUpdateTheme(grammarFactory, this._currentTheme, this._currentTokenColorMap);
	}

	// Pushes the theme into the grammar factory (if any), regenerates the
	// token CSS rules, and publishes the color map to the registry.
	protected _doUpdateTheme(grammarFactory: TMGrammarFactory | null, theme: IRawTheme, tokenColorMap: string[]): void {
		grammarFactory?.setTheme(theme, tokenColorMap);
		const colorMap = AbstractTextMateService._toColorMap(tokenColorMap);
		const cssRules = generateTokensCSSForColorMap(colorMap);
		this._styleElement.textContent = cssRules;
		TokenizationRegistry.setColorMap(colorMap);
	}

	// Deep equality of two theming-rule lists (scope + fontStyle/foreground/background).
	private static equalsTokenRules(a: ITextMateThemingRule[] | null, b: ITextMateThemingRule[] | null): boolean {
		if (!b || !a || b.length !== a.length) {
			return false;
		}
		for (let i = b.length - 1; i >= 0; i--) {
			const r1 = b[i];
			const r2 = a[i];
			if (r1.scope !== r2.scope) {
				return false;
			}
			const s1 = r1.settings;
			const s2 = r2.settings;
			if (s1 && s2) {
				if (s1.fontStyle !== s2.fontStyle || s1.foreground !== s2.foreground || s1.background !== s2.background) {
					return false;
				}
			} else if (!s1 || !s2) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Validates one `contributes.grammars` entry; reports problems through
	 * `collector` and returns false when the entry must be skipped.
	 */
	private _validateGrammarExtensionPoint(extensionLocation: URI, syntax: ITMSyntaxExtensionPoint, collector: ExtensionMessageCollector): boolean {
		if (syntax.language && ((typeof syntax.language !== 'string') || !this._languageService.isRegisteredLanguageId(syntax.language))) {
			collector.error(nls.localize('invalid.language', "Unknown language in `contributes.{0}.language`. Provided value: {1}", grammarsExtPoint.name, String(syntax.language)));
			return false;
		}
		if (!syntax.scopeName || (typeof syntax.scopeName !== 'string')) {
			collector.error(nls.localize('invalid.scopeName', "Expected string in `contributes.{0}.scopeName`. Provided value: {1}", grammarsExtPoint.name, String(syntax.scopeName)));
			return false;
		}
		if (!syntax.path || (typeof syntax.path !== 'string')) {
			collector.error(nls.localize('invalid.path.0', "Expected string in `contributes.{0}.path`. Provided value: {1}", grammarsExtPoint.name, String(syntax.path)));
			return false;
		}
		if (syntax.injectTo && (!Array.isArray(syntax.injectTo) || syntax.injectTo.some(scope => typeof scope !== 'string'))) {
			collector.error(nls.localize('invalid.injectTo', "Invalid value in `contributes.{0}.injectTo`. Must be an array of language scope names. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.injectTo)));
			return false;
		}
		if (syntax.embeddedLanguages && !types.isObject(syntax.embeddedLanguages)) {
			collector.error(nls.localize('invalid.embeddedLanguages', "Invalid value in `contributes.{0}.embeddedLanguages`. Must be an object map from scope name to language. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.embeddedLanguages)));
			return false;
		}
		if (syntax.tokenTypes && !types.isObject(syntax.tokenTypes)) {
			collector.error(nls.localize('invalid.tokenTypes', "Invalid value in `contributes.{0}.tokenTypes`. Must be an object map from scope name to token type. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.tokenTypes)));
			return false;
		}
		const grammarLocation = resources.joinPath(extensionLocation, syntax.path);
		if (!resources.isEqualOrParent(grammarLocation, extensionLocation)) {
			// Warning only: a grammar outside the extension folder still works.
			collector.warn(nls.localize('invalid.path.1', "Expected `contributes.{0}.path` ({1}) to be included inside extension's folder ({2}). This might make the extension non-portable.", grammarsExtPoint.name, grammarLocation.path, extensionLocation.path));
		}
		return true;
	}

	/**
	 * Creates (or returns null for unknown languages/grammars) the raw
	 * TextMate grammar for `languageId`.
	 */
	public async createGrammar(languageId: string): Promise<IGrammar | null> {
		if (!this._languageService.isRegisteredLanguageId(languageId)) {
			return null;
		}
		const grammarFactory = await this._getOrCreateGrammarFactory();
		if (!grammarFactory.has(languageId)) {
			return null;
		}
		const encodedLanguageId = this._languageService.languageIdCodec.encodeLanguageId(languageId);
		const { grammar } = await grammarFactory.createGrammar(languageId, encodedLanguageId);
		return grammar;
	}

	// Subclass hooks, invoked when the grammar factory is (re)created/disposed.
	protected _onDidCreateGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): void {
	}

	protected _onDidDisposeGrammarFactory(): void {
	}

	// Cached module promise: vscode-oniguruma is imported and its WASM loaded once.
	private _vscodeOniguruma: Promise<typeof import('vscode-oniguruma')> | null = null;

	private _getVSCodeOniguruma(): Promise<typeof import('vscode-oniguruma')> {
		if (!this._vscodeOniguruma) {
			this._vscodeOniguruma = this._doGetVSCodeOniguruma();
		}
		return this._vscodeOniguruma;
	}

	private async _doGetVSCodeOniguruma(): Promise<typeof import('vscode-oniguruma')> {
		const [vscodeOniguruma, wasm] = await Promise.all([import('vscode-oniguruma'), this._loadVSCodeOnigurumWASM()]);
		const options = {
			data: wasm,
			// Routed through the debug-mode print function (no-op by default).
			print: (str: string) => {
				this._debugModePrintFunc(str);
			}
		};
		await vscodeOniguruma.loadWASM(options);
		return vscodeOniguruma;
	}

	// Supplies the oniguruma WASM bytes; environment-specific (browser/node).
	protected abstract _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer>;
}
/**
 * Wraps a {@link TMTokenization} and skips tokenization of lines at or above
 * the `editor.maxTokenizationLineLength` limit, which is read per language
 * (override identifier) and kept up to date via the configuration service.
 */
class TMTokenizationSupportWithLineLimit implements ITokenizationSupport {
	private readonly _languageId: string;
	private readonly _encodedLanguageId: LanguageId;
	private readonly _actual: TMTokenization;
	// Current line-length limit; refreshed whenever the setting changes.
	private _maxTokenizationLineLength: number;

	constructor(
		languageId: string,
		encodedLanguageId: LanguageId,
		actual: TMTokenization,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
	) {
		this._languageId = languageId;
		this._encodedLanguageId = encodedLanguageId;
		this._actual = actual;
		this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
			overrideIdentifier: this._languageId
		});
		// NOTE(review): this configuration listener is never disposed here —
		// confirm this object's lifetime makes that acceptable.
		this._configurationService.onDidChangeConfiguration(e => {
			if (e.affectsConfiguration('editor.maxTokenizationLineLength')) {
				this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
					overrideIdentifier: this._languageId
				});
			}
		});
	}

	getInitialState(): IState {
		return this._actual.getInitialState();
	}

	tokenize(line: string, hasEOL: boolean, state: IState): TokenizationResult {
		// Only the encoded tokenization path is implemented for TextMate grammars.
		throw new Error('Not supported!');
	}

	tokenizeEncoded(line: string, hasEOL: boolean, state: StateStack): EncodedTokenizationResult {
		// Do not attempt to tokenize if a line is too long
		if (line.length >= this._maxTokenizationLineLength) {
			return nullTokenizeEncoded(this._encodedLanguageId, state);
		}
		return this._actual.tokenizeEncoded(line, hasEOL, state);
	}
}

View file

@ -0,0 +1,95 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { compareBy, numberComparator } from 'vs/base/common/arrays';
import { ContiguousGrowingArray } from 'vs/editor/common/model/textModelTokens';
/**
 * A batch of disjoint {@link SingleArrayEdit}s. The edits are stored sorted
 * by ascending offset so consumers can iterate them monotonically.
 */
export class ArrayEdit {
	public readonly edits: readonly SingleArrayEdit[];

	constructor(
		/**
		 * Disjoint edits that are applied in parallel
		 */
		edits: readonly SingleArrayEdit[]
	) {
		// Sort a private copy by offset; the caller's array is left untouched.
		this.edits = [...edits].sort((a, b) => a.offset - b.offset);
	}

	/**
	 * Applies every edit to `array`, replacing each edited range with
	 * default-valued elements of the new length.
	 */
	applyTo(array: ContiguousGrowingArray<any>): void {
		// Walk back-to-front so earlier offsets remain valid while splicing.
		for (let idx = this.edits.length - 1; idx >= 0; idx--) {
			const edit = this.edits[idx];
			array.delete(edit.offset, edit.length);
			array.insert(edit.offset, edit.newLength);
		}
	}
}
/**
 * Describes one splice in an {@link ArrayEdit}: at `offset`, `length`
 * elements are removed and `newLength` elements are inserted.
 */
export class SingleArrayEdit {
	constructor(
		public readonly offset: number,
		public readonly length: number,
		public readonly newLength: number,
	) { }

	/**
	 * Debug representation, e.g. `[3, +2) -> +5`: the half-open removed range
	 * followed by the inserted length.
	 * (Fix: the template previously emitted a stray trailing `}`.)
	 */
	toString() {
		return `[${this.offset}, +${this.length}) -> +${this.newLength}`;
	}
}
/**
 * Maps an index in a source array to the corresponding index after some
 * transformation, or `undefined` if the index has no image (e.g. it fell
 * inside an edited range).
 */
export interface IIndexTransformer {
	transform(index: number): number | undefined;
}
/**
 * Translates indices through a single {@link ArrayEdit}. Queries must be made
 * with monotonically non-decreasing indices, which lets the transformer keep a
 * cursor into the (offset-sorted) edit list instead of re-scanning it.
 */
export class MonotonousIndexTransformer implements IIndexTransformer {
	public static fromMany(transformations: ArrayEdit[]): IIndexTransformer {
		// TODO improve performance by combining transformations first
		return new CombinedIndexTransformer(
			transformations.map(t => new MonotonousIndexTransformer(t))
		);
	}

	// Cursor into `transformation.edits` (all edits before it end <= the last query).
	private idx = 0;
	// Accumulated length delta of all edits already passed by the cursor.
	private offset = 0;

	constructor(private readonly transformation: ArrayEdit) {
	}

	/**
	 * Precondition: index >= previous-value-of(index).
	 */
	transform(index: number): number | undefined {
		// Advance past every edit that ends at or before `index`, folding its
		// net length change (newLength - length) into the running offset.
		let edit = this.transformation.edits[this.idx] as SingleArrayEdit | undefined;
		while (edit !== undefined && edit.offset + edit.length <= index) {
			this.offset += edit.newLength - edit.length;
			this.idx++;
			edit = this.transformation.edits[this.idx];
		}
		// assert edit === undefined || index < edit.offset + edit.length
		if (edit !== undefined && edit.offset <= index) {
			// The index lies inside the edited range, so it has no image.
			return undefined;
		}
		return index + this.offset;
	}
}
/**
 * Chains several {@link IIndexTransformer}s: the output index of each one is
 * fed into the next. Propagates `undefined` as soon as any link drops the index.
 */
export class CombinedIndexTransformer implements IIndexTransformer {
	constructor(
		private readonly transformers: IIndexTransformer[]
	) { }

	transform(index: number): number | undefined {
		let current = index;
		for (const transformer of this.transformers) {
			const mapped = transformer.transform(current);
			if (mapped === undefined) {
				// The index was deleted by this transformation; stop here.
				return undefined;
			}
			current = mapped;
		}
		return current;
	}
}

View file

@ -1,21 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { InstantiationType, registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { AbstractTextMateService } from 'vs/workbench/services/textMate/browser/abstractTextMateService';
import { FileAccess } from 'vs/base/common/network';
/**
 * Browser implementation of the TextMate service: supplies the oniguruma
 * WASM bytes by fetching the bundled `onig.wasm` resource.
 */
export class TextMateService extends AbstractTextMateService {
	protected async _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer> {
		const response = await fetch(FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm').toString(true));
		// Using the response directly only works if the server sets the MIME type 'application/wasm'.
		// Otherwise, a TypeError is thrown when using the streaming compiler.
		// We therefore use the non-streaming compiler :(.
		return await response.arrayBuffer();
	}
}

// Eager: tokenization support must be registered at startup, not on first use.
registerSingleton(ITextMateService, TextMateService, InstantiationType.Eager);

View file

@ -1,261 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ITextMateService } from 'vs/workbench/services/textMate/browser/textMate';
import { InstantiationType, registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { AbstractTextMateService } from 'vs/workbench/services/textMate/browser/abstractTextMateService';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { IWorkbenchThemeService } from 'vs/workbench/services/themes/common/workbenchThemeService';
import { INotificationService } from 'vs/platform/notification/common/notification';
import { ILogService } from 'vs/platform/log/common/log';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { createWebWorker, MonacoWebWorker } from 'vs/editor/browser/services/webWorker';
import { IModelService } from 'vs/editor/common/services/model';
import type { IRawTheme } from 'vscode-textmate';
import { IValidGrammarDefinition } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { ICreateData, TextMateWorker } from 'vs/workbench/services/textMate/browser/textMateWorker';
import { ITextModel } from 'vs/editor/common/model';
import { Disposable } from 'vs/base/common/lifecycle';
import { UriComponents, URI } from 'vs/base/common/uri';
import { ContiguousMultilineTokensBuilder } from 'vs/editor/common/tokens/contiguousMultilineTokensBuilder';
import { TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IModelContentChangedEvent } from 'vs/editor/common/textModelEvents';
import { IExtensionResourceLoaderService } from 'vs/platform/extensionResourceLoader/common/extensionResourceLoader';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { IProgressService } from 'vs/platform/progress/common/progress';
import { FileAccess, nodeModulesAsarUnpackedPath, nodeModulesPath } from 'vs/base/common/network';
import { ILanguageIdCodec } from 'vs/editor/common/languages';
import { ILanguageConfigurationService } from 'vs/editor/common/languages/languageConfigurationRegistry';
const RUN_TEXTMATE_IN_WORKER = false;
/**
 * Mirrors one main-thread text model into the textmate web worker and applies
 * the tokens the worker sends back onto the model.
 *
 * Mirroring is active only while the model is attached to an editor
 * (see `_onDidChangeAttached`), to avoid tokenizing invisible models.
 */
class ModelWorkerTextMateTokenizer extends Disposable {

	private readonly _worker: TextMateWorker;
	private readonly _languageIdCodec: ILanguageIdCodec;
	private readonly _model: ITextModel;
	// True while the model's content is mirrored in the worker.
	private _isSynced: boolean;
	// Content changes already forwarded to the worker whose resulting tokens
	// have not arrived yet. Used in `setTokens` to shift stale tokens onto the
	// current document content.
	private _pendingChanges: IModelContentChangedEvent[] = [];

	constructor(worker: TextMateWorker, languageIdCodec: ILanguageIdCodec, model: ITextModel) {
		super();
		this._worker = worker;
		this._languageIdCodec = languageIdCodec;
		this._model = model;
		this._isSynced = false;

		this._register(this._model.onDidChangeAttached(() => this._onDidChangeAttached()));
		// Evaluate the initial attached state immediately.
		this._onDidChangeAttached();

		this._register(this._model.onDidChangeContent((e) => {
			if (this._isSynced) {
				// Forward the edit and remember it until the worker's tokens
				// for (at least) this version come back.
				this._worker.acceptModelChanged(this._model.uri.toString(), e);
				this._pendingChanges.push(e);
			}
		}));

		this._register(this._model.onDidChangeLanguage((e) => {
			if (this._isSynced) {
				const languageId = this._model.getLanguageId();
				const encodedLanguageId = this._languageIdCodec.encodeLanguageId(languageId);
				this._worker.acceptModelLanguageChanged(this._model.uri.toString(), languageId, encodedLanguageId);
			}
		}));
	}

	// Start or stop mirroring depending on whether the model is visible in an editor.
	private _onDidChangeAttached(): void {
		if (this._model.isAttachedToEditor()) {
			if (!this._isSynced) {
				this._beginSync();
			}
		} else {
			if (this._isSynced) {
				this._endSync();
			}
		}
	}

	// Send the full model snapshot to the worker and mark it as synced.
	private _beginSync(): void {
		this._isSynced = true;
		const languageId = this._model.getLanguageId();
		const encodedLanguageId = this._languageIdCodec.encodeLanguageId(languageId);
		this._worker.acceptNewModel({
			uri: this._model.uri,
			versionId: this._model.getVersionId(),
			lines: this._model.getLinesContent(),
			EOL: this._model.getEOL(),
			languageId,
			encodedLanguageId
		});
	}

	// Stop mirroring and tell the worker to drop its copy of the model.
	// NOTE(review): `_pendingChanges` is not cleared here — presumably harmless
	// because `setTokens` is only reached while synced; confirm on re-sync.
	private _endSync(): void {
		this._isSynced = false;
		this._worker.acceptRemovedModel(this._model.uri.toString());
	}

	public override dispose() {
		super.dispose();
		// NOTE(review): runs even if never synced, so the worker may receive a
		// removal for a model it never saw — confirm the worker tolerates that.
		this._endSync();
	}

	// Drop pending changes the worker has already incorporated (version <= versionId).
	private _confirm(versionId: number): void {
		while (this._pendingChanges.length > 0 && this._pendingChanges[0].versionId <= versionId) {
			this._pendingChanges.shift();
		}
	}

	/**
	 * Applies tokens computed by the worker for `versionId`. Any edits made
	 * since that version (the remaining `_pendingChanges`) are replayed onto
	 * the token ranges so they line up with the current document.
	 */
	public setTokens(versionId: number, rawTokens: ArrayBuffer): void {
		this._confirm(versionId);
		const tokens = ContiguousMultilineTokensBuilder.deserialize(new Uint8Array(rawTokens));

		for (let i = 0; i < this._pendingChanges.length; i++) {
			// We applied a change, but received tokens computed before it:
			// adjust the token positions to account for that change.
			const change = this._pendingChanges[i];
			for (let j = 0; j < tokens.length; j++) {
				for (let k = 0; k < change.changes.length; k++) {
					tokens[j].applyEdit(change.changes[k].range, change.changes[k].text);
				}
			}
		}

		this._model.tokenization.setTokens(tokens);
	}
}
/**
 * Main-thread endpoint the textmate web worker calls back into: resolving
 * grammar files and delivering computed tokens.
 */
export class TextMateWorkerHost {
	constructor(
		private readonly textMateService: TextMateService,
		@IExtensionResourceLoaderService private readonly _extensionResourceLoaderService: IExtensionResourceLoaderService
	) {
	}

	/** Reads a grammar resource on behalf of the worker. */
	async readFile(_resource: UriComponents): Promise<string> {
		return this._extensionResourceLoaderService.readExtensionResource(URI.revive(_resource));
	}

	/** Forwards tokens computed by the worker to the service. */
	async setTokens(_resource: UriComponents, versionId: number, tokens: Uint8Array): Promise<void> {
		this.textMateService.setTokens(URI.revive(_resource), versionId, tokens);
	}
}
/**
 * Workbench textmate service that can additionally mirror tokenization into a
 * web worker (gated behind {@link RUN_TEXTMATE_IN_WORKER}).
 *
 * Each syncable model gets a {@link ModelWorkerTextMateTokenizer} that keeps
 * the worker's copy up to date; the worker pushes tokens back via
 * {@link TextMateWorkerHost.setTokens} → {@link TextMateService.setTokens}.
 */
export class TextMateService extends AbstractTextMateService {

	private _worker: MonacoWebWorker<TextMateWorker> | null;
	// Proxy to the worker; null until the worker reports ready.
	private _workerProxy: TextMateWorker | null;
	// Per-model tokenizers, keyed by the model URI string.
	private _tokenizers: { [uri: string]: ModelWorkerTextMateTokenizer };

	constructor(
		@ILanguageService languageService: ILanguageService,
		@IWorkbenchThemeService themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService notificationService: INotificationService,
		@ILogService logService: ILogService,
		@IConfigurationService configurationService: IConfigurationService,
		@IProgressService progressService: IProgressService,
		@IModelService private readonly _modelService: IModelService,
		@IWorkbenchEnvironmentService private readonly _environmentService: IWorkbenchEnvironmentService,
		@ILanguageConfigurationService private readonly _languageConfigurationService: ILanguageConfigurationService,
	) {
		super(languageService, themeService, extensionResourceLoaderService, notificationService, logService, configurationService, progressService);
		this._worker = null;
		this._workerProxy = null;
		this._tokenizers = Object.create(null);
		this._register(this._modelService.onModelAdded(model => this._onModelAdded(model)));
		this._register(this._modelService.onModelRemoved(model => this._onModelRemoved(model)));
		// Pick up models that already exist at construction time.
		this._modelService.getModels().forEach((model) => this._onModelAdded(model));
	}

	// Start mirroring a model into the worker; no-op until the worker proxy
	// exists (models are re-scanned once it does, see _onDidCreateGrammarFactory).
	private _onModelAdded(model: ITextModel): void {
		if (!this._workerProxy) {
			return;
		}
		if (model.isTooLargeForSyncing()) {
			return;
		}
		const key = model.uri.toString();
		const tokenizer = new ModelWorkerTextMateTokenizer(this._workerProxy, this._languageService.languageIdCodec, model);
		this._tokenizers[key] = tokenizer;
	}

	private _onModelRemoved(model: ITextModel): void {
		const key = model.uri.toString();
		if (this._tokenizers[key]) {
			this._tokenizers[key].dispose();
			delete this._tokenizers[key];
		}
	}

	// Fetches the oniguruma WASM binary; the .asar path is used in built
	// (packaged) products. NOTE(review): name presumably mirrors a base-class
	// method with the same "Onigurum" spelling — confirm before renaming.
	protected async _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer> {
		const response = await fetch(this._environmentService.isBuilt
			? FileAccess.asBrowserUri(`${nodeModulesAsarUnpackedPath}/vscode-oniguruma/release/onig.wasm`).toString(true)
			: FileAccess.asBrowserUri(`${nodeModulesPath}/vscode-oniguruma/release/onig.wasm`).toString(true));
		return response;
	}

	// (Re)creates the worker with the new grammar definitions. Any previous
	// worker and its per-model tokenizers are torn down first.
	protected override _onDidCreateGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): void {
		this._killWorker();

		if (RUN_TEXTMATE_IN_WORKER) {
			const workerHost = new TextMateWorkerHost(this, this._extensionResourceLoaderService);
			const createData: ICreateData = { grammarDefinitions };
			const worker = createWebWorker<TextMateWorker>(this._modelService, this._languageConfigurationService, {
				createData,
				label: 'textMateWorker',
				moduleId: 'vs/workbench/services/textMate/browser/textMateWorker',
				host: workerHost
			});
			this._worker = worker;
			worker.getProxy().then((proxy) => {
				if (this._worker !== worker) {
					// disposed in the meantime
					return;
				}
				this._workerProxy = proxy;
				// Replay current theme and existing models onto the fresh worker.
				if (this._currentTheme && this._currentTokenColorMap) {
					this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
				}
				this._modelService.getModels().forEach((model) => this._onModelAdded(model));
			});
		}
	}

	protected override _doUpdateTheme(grammarFactory: TMGrammarFactory | null, theme: IRawTheme, colorMap: string[]): void {
		super._doUpdateTheme(grammarFactory, theme, colorMap);
		// Keep the worker's theme in sync with the main thread.
		if (this._currentTheme && this._currentTokenColorMap && this._workerProxy) {
			this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
		}
	}

	protected override _onDidDisposeGrammarFactory(): void {
		this._killWorker();
	}

	// Disposes all per-model tokenizers and the worker itself.
	private _killWorker(): void {
		for (const key of Object.keys(this._tokenizers)) {
			this._tokenizers[key].dispose();
		}
		this._tokenizers = Object.create(null);

		if (this._worker) {
			this._worker.dispose();
			this._worker = null;
		}
		this._workerProxy = null;
	}

	// Called (via TextMateWorkerHost) when the worker has tokens for a model.
	// Silently ignores models we no longer track.
	setTokens(resource: URI, versionId: number, tokens: ArrayBuffer): void {
		const key = resource.toString();
		if (!this._tokenizers[key]) {
			return;
		}
		this._tokenizers[key].setTokens(versionId, tokens);
	}
}
// Eager: tokenization must be available as soon as the workbench starts.
registerSingleton(ITextMateService, TextMateService, InstantiationType.Eager);

View file

@ -0,0 +1,10 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { registerSingleton, InstantiationType } from 'vs/platform/instantiation/common/extensions';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import { TextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeatureImpl';
// Eager: the feature must register its tokenization factories before editors open.
registerSingleton(ITextMateTokenizationFeature, TextMateTokenizationFeature, InstantiationType.Eager);

View file

@ -7,9 +7,9 @@ import { Event } from 'vs/base/common/event';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import type { IGrammar } from 'vscode-textmate';
export const ITextMateService = createDecorator<ITextMateService>('textMateService');
export const ITextMateTokenizationFeature = createDecorator<ITextMateTokenizationFeature>('textMateTokenizationFeature');
export interface ITextMateService {
export interface ITextMateTokenizationFeature {
readonly _serviceBrand: undefined;
onDidEncounterLanguage: Event<string>;

View file

@ -0,0 +1,430 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as dom from 'vs/base/browser/dom';
import { equals as equalArray } from 'vs/base/common/arrays';
import { Color } from 'vs/base/common/color';
import { onUnexpectedError } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
import { Disposable, DisposableStore } from 'vs/base/common/lifecycle';
import { FileAccess, nodeModulesAsarUnpackedPath, nodeModulesPath } from 'vs/base/common/network';
import { isWeb } from 'vs/base/common/platform';
import * as resources from 'vs/base/common/resources';
import * as types from 'vs/base/common/types';
import { URI } from 'vs/base/common/uri';
import { StandardTokenType } from 'vs/editor/common/encodedTokenAttributes';
import { ITokenizationSupport, TokenizationRegistry } from 'vs/editor/common/languages';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { generateTokensCSSForColorMap } from 'vs/editor/common/languages/supports/tokenization';
import * as nls from 'vs/nls';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IExtensionResourceLoaderService } from 'vs/platform/extensionResourceLoader/common/extensionResourceLoader';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ILogService } from 'vs/platform/log/common/log';
import { INotificationService } from 'vs/platform/notification/common/notification';
import { IProgressService, ProgressLocation } from 'vs/platform/progress/common/progress';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { ExtensionMessageCollector, IExtensionPointUser } from 'vs/workbench/services/extensions/common/extensionsRegistry';
import { ITextMateTokenizationFeature } from 'vs/workbench/services/textMate/browser/textMateTokenizationFeature';
import { TextMateTokenizationSupport } from 'vs/workbench/services/textMate/browser/tokenizationSupport/textMateTokenizationSupport';
import { TokenizationSupportWithLineLimit } from 'vs/workbench/services/textMate/browser/tokenizationSupport/tokenizationSupportWithLineLimit';
import { TextMateWorkerHost } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerHost';
import { missingTMGrammarErrorMessage, TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { grammarsExtPoint, ITMSyntaxExtensionPoint } from 'vs/workbench/services/textMate/common/TMGrammars';
import { IValidEmbeddedLanguagesMap, IValidGrammarDefinition, IValidTokenTypeMap } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { ITextMateThemingRule, IWorkbenchColorTheme, IWorkbenchThemeService } from 'vs/workbench/services/themes/common/workbenchThemeService';
import type { IGrammar, IOnigLib, IRawTheme } from 'vscode-textmate';
/**
 * Wires textmate grammars contributed by extensions into the editor's
 * tokenization registry, keeps the token color theme in sync (both CSS rules
 * and the worker host), and lazily loads vscode-textmate / vscode-oniguruma.
 */
export class TextMateTokenizationFeature extends Disposable implements ITextMateTokenizationFeature {
	public _serviceBrand: undefined;

	private readonly _onDidEncounterLanguage: Emitter<string> = this._register(new Emitter<string>());
	public readonly onDidEncounterLanguage: Event<string> = this._onDidEncounterLanguage.event;

	// Stylesheet that carries the generated token color CSS rules.
	private readonly _styleElement: HTMLStyleElement;
	// Language ids encountered so far; re-requested after grammars change.
	private readonly _createdModes: string[] = [];
	// Encoded language ids we already fired onDidEncounterLanguage for (dedupe).
	private readonly _encounteredLanguages: boolean[] = [];

	private _debugMode: boolean = false;
	private _debugModePrintFunc: (str: string) => void = () => { };

	// Null until the grammars extension point has been processed.
	private _grammarDefinitions: IValidGrammarDefinition[] | null = null;
	private _grammarFactory: TMGrammarFactory | null = null;
	// Factory registrations in the TokenizationRegistry; cleared when grammars change.
	private readonly _tokenizersRegistrations = new DisposableStore();

	private _currentTheme: IRawTheme | null = null;
	private _currentTokenColorMap: string[] | null = null;
	private readonly _workerHost = this._instantiationService.createInstance(TextMateWorkerHost);

	constructor(
		@ILanguageService private readonly _languageService: ILanguageService,
		@IWorkbenchThemeService private readonly _themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService private readonly _extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService private readonly _notificationService: INotificationService,
		@ILogService private readonly _logService: ILogService,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
		@IProgressService private readonly _progressService: IProgressService,
		@IWorkbenchEnvironmentService private readonly _environmentService: IWorkbenchEnvironmentService,
		@IInstantiationService private readonly _instantiationService: IInstantiationService,
	) {
		super();
		this._styleElement = dom.createStyleSheet();
		this._styleElement.className = 'vscode-tokens-styles';
		grammarsExtPoint.setHandler((extensions) => this.handleGrammarsExtPoint(extensions));
		this._updateTheme(this._themeService.getColorTheme(), true);
		this._register(this._themeService.onDidColorThemeChange(() => {
			this._updateTheme(this._themeService.getColorTheme(), false);
		}));
		this._languageService.onDidEncounterLanguage((languageId) => {
			this._createdModes.push(languageId);
		});
	}

	// Rebuilds all grammar definitions and tokenization factory registrations
	// whenever the grammars extension point changes.
	private handleGrammarsExtPoint(extensions: readonly IExtensionPointUser<ITMSyntaxExtensionPoint[]>[]): void {
		this._grammarDefinitions = null;
		if (this._grammarFactory) {
			this._grammarFactory.dispose();
			this._grammarFactory = null;
		}
		this._tokenizersRegistrations.clear();

		this._grammarDefinitions = [];
		for (const extension of extensions) {
			const grammars = extension.value;
			for (const grammar of grammars) {
				const def = this.createValidGrammarDefinition(extension, grammar);
				if (def) {
					this._grammarDefinitions.push(def);
					if (def.language) {
						this._tokenizersRegistrations.add(TokenizationRegistry.registerFactory(def.language, {
							createTokenizationSupport: async (): Promise<ITokenizationSupport | null> => this.createTokenizationSupport(def.language!)
						}));
					}
				}
			}
		}

		this._workerHost.setGrammarDefinitions(this._grammarDefinitions);

		// Re-request tokenization for languages already seen so they pick up
		// the new factories.
		for (const createdMode of this._createdModes) {
			TokenizationRegistry.getOrCreate(createdMode);
		}
	}

	// Validates one contributed grammar and normalizes it into an
	// IValidGrammarDefinition; returns null (after reporting) when invalid.
	private createValidGrammarDefinition(extension: IExtensionPointUser<ITMSyntaxExtensionPoint[]>, grammar: ITMSyntaxExtensionPoint): IValidGrammarDefinition | null {
		if (!validateGrammarExtensionPoint(extension.description.extensionLocation, grammar, extension.collector, this._languageService)) {
			return null;
		}

		const grammarLocation = resources.joinPath(extension.description.extensionLocation, grammar.path);

		const embeddedLanguages: IValidEmbeddedLanguagesMap = Object.create(null);
		if (grammar.embeddedLanguages) {
			const scopes = Object.keys(grammar.embeddedLanguages);
			for (let i = 0, len = scopes.length; i < len; i++) {
				const scope = scopes[i];
				const language = grammar.embeddedLanguages[scope];
				if (typeof language !== 'string') {
					// never hurts to be too careful
					continue;
				}
				if (this._languageService.isRegisteredLanguageId(language)) {
					embeddedLanguages[scope] = this._languageService.languageIdCodec.encodeLanguageId(language);
				}
			}
		}

		const tokenTypes: IValidTokenTypeMap = Object.create(null);
		if (grammar.tokenTypes) {
			const scopes = Object.keys(grammar.tokenTypes);
			for (const scope of scopes) {
				const tokenType = grammar.tokenTypes[scope];
				switch (tokenType) {
					case 'string':
						tokenTypes[scope] = StandardTokenType.String;
						break;
					case 'other':
						tokenTypes[scope] = StandardTokenType.Other;
						break;
					case 'comment':
						tokenTypes[scope] = StandardTokenType.Comment;
						break;
				}
			}
		}

		let validLanguageId: string | null = null;
		if (grammar.language && this._languageService.isRegisteredLanguageId(grammar.language)) {
			validLanguageId = grammar.language;
		}

		// Coerces arbitrary extension input to a string array, falling back to
		// defaultValue on any non-string content.
		function asStringArray(array: unknown, defaultValue: string[]): string[] {
			if (!Array.isArray(array)) {
				return defaultValue;
			}
			if (!array.every(e => typeof e === 'string')) {
				return defaultValue;
			}
			return array;
		}

		return {
			location: grammarLocation,
			language: validLanguageId || undefined,
			scopeName: grammar.scopeName,
			embeddedLanguages: embeddedLanguages,
			tokenTypes: tokenTypes,
			injectTo: grammar.injectTo,
			balancedBracketSelectors: asStringArray(grammar.balancedBracketScopes, ['*']),
			unbalancedBracketSelectors: asStringArray(grammar.unbalancedBracketScopes, []),
		};
	}

	/**
	 * Turns on oniguruma debug logging, routing output through `printFn`,
	 * and shows a progress notification whose Stop button ends logging
	 * (invoking `onStop`).
	 */
	public startDebugMode(printFn: (str: string) => void, onStop: () => void): void {
		if (this._debugMode) {
			this._notificationService.error(nls.localize('alreadyDebugging', "Already Logging."));
			return;
		}

		this._debugModePrintFunc = printFn;
		this._debugMode = true;

		// NOTE(review): always true at this point (just set above).
		if (this._debugMode) {
			this._progressService.withProgress(
				{
					location: ProgressLocation.Notification,
					buttons: [nls.localize('stop', "Stop")]
				},
				(progress) => {
					progress.report({
						message: nls.localize('progress1', "Preparing to log TM Grammar parsing. Press Stop when finished.")
					});

					return this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						vscodeOniguruma.setDefaultDebugCall(true);
						progress.report({
							message: nls.localize('progress2', "Now logging TM Grammar parsing. Press Stop when finished.")
						});
						// Intentionally never resolves: the notification stays
						// open until the user presses Stop.
						return new Promise<void>((resolve, reject) => { });
					});
				},
				(choice) => {
					this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						this._debugModePrintFunc = () => { };
						this._debugMode = false;
						vscodeOniguruma.setDefaultDebugCall(false);
						onStop();
					});
				}
			);
		}
	}

	private _canCreateGrammarFactory(): boolean {
		// Check if extension point is ready
		return !!this._grammarDefinitions;
	}

	// Lazily creates the TMGrammarFactory, loading vscode-textmate and the
	// oniguruma WASM on first use.
	private async _getOrCreateGrammarFactory(): Promise<TMGrammarFactory> {
		if (this._grammarFactory) {
			return this._grammarFactory;
		}

		const [vscodeTextmate, vscodeOniguruma] = await Promise.all([import('vscode-textmate'), this._getVSCodeOniguruma()]);
		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources: string[]) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str: string) => vscodeOniguruma.createOnigString(str)
		});

		// Avoid duplicate instantiations
		// (a concurrent call may have won the race while we awaited above)
		if (this._grammarFactory) {
			return this._grammarFactory;
		}

		this._grammarFactory = new TMGrammarFactory({
			logTrace: (msg: string) => this._logService.trace(msg),
			logError: (msg: string, err: any) => this._logService.error(msg, err),
			readFile: (resource: URI) => this._extensionResourceLoaderService.readExtensionResource(resource)
		}, this._grammarDefinitions || [], vscodeTextmate, onigLib);

		// Push the current theme into the freshly created factory.
		this._updateTheme(this._themeService.getColorTheme(), true);

		return this._grammarFactory;
	}

	// Factory callback registered in the TokenizationRegistry: builds the
	// tokenization support (grammar + optional background tokenizer) for a
	// language, or null if no grammar applies.
	private async createTokenizationSupport(languageId: string): Promise<ITokenizationSupport | null> {
		if (!this._languageService.isRegisteredLanguageId(languageId)) {
			return null;
		}
		if (!this._canCreateGrammarFactory()) {
			return null;
		}

		try {
			const grammarFactory = await this._getOrCreateGrammarFactory();
			if (!grammarFactory.has(languageId)) {
				return null;
			}
			const encodedLanguageId = this._languageService.languageIdCodec.encodeLanguageId(languageId);
			const r = await grammarFactory.createGrammar(languageId, encodedLanguageId);
			if (!r.grammar) {
				return null;
			}
			const tokenization = new TextMateTokenizationSupport(
				r.grammar,
				r.initialState,
				r.containsEmbeddedLanguages,
				(textModel, tokenStore) => this._workerHost.createBackgroundTokenizer(textModel, tokenStore),
			);
			tokenization.onDidEncounterLanguage((encodedLanguageId) => {
				// Fire onDidEncounterLanguage at most once per language.
				if (!this._encounteredLanguages[encodedLanguageId]) {
					const languageId = this._languageService.languageIdCodec.decodeLanguageId(encodedLanguageId);
					this._encounteredLanguages[encodedLanguageId] = true;
					this._onDidEncounterLanguage.fire(languageId);
				}
			});
			// Wrap to respect editor.maxTokenizationLineLength.
			return new TokenizationSupportWithLineLimit(languageId, encodedLanguageId, tokenization, this._configurationService);
		} catch (err) {
			if (err.message && err.message === missingTMGrammarErrorMessage) {
				// Don't log this error message
				return null;
			}
			onUnexpectedError(err);
			return null;
		}
	}

	// Propagates a (possibly unchanged) color theme to the grammar factory,
	// the generated CSS, the TokenizationRegistry and the worker host.
	private _updateTheme(colorTheme: IWorkbenchColorTheme, forceUpdate: boolean): void {
		if (!forceUpdate && this._currentTheme && this._currentTokenColorMap && equalsTokenRules(this._currentTheme.settings, colorTheme.tokenColors)
			&& equalArray(this._currentTokenColorMap, colorTheme.tokenColorMap)) {
			// Nothing relevant changed; skip the expensive updates.
			return;
		}
		this._currentTheme = { name: colorTheme.label, settings: colorTheme.tokenColors };
		this._currentTokenColorMap = colorTheme.tokenColorMap;

		this._grammarFactory?.setTheme(this._currentTheme, this._currentTokenColorMap);
		const colorMap = toColorMap(this._currentTokenColorMap);
		const cssRules = generateTokensCSSForColorMap(colorMap);
		this._styleElement.textContent = cssRules;
		TokenizationRegistry.setColorMap(colorMap);

		if (this._currentTheme && this._currentTokenColorMap) {
			this._workerHost.acceptTheme(this._currentTheme, this._currentTokenColorMap);
		}
	}

	/**
	 * Creates (or returns null for unknown languages / missing grammars) the
	 * raw textmate grammar for `languageId`.
	 */
	public async createGrammar(languageId: string): Promise<IGrammar | null> {
		if (!this._languageService.isRegisteredLanguageId(languageId)) {
			return null;
		}
		const grammarFactory = await this._getOrCreateGrammarFactory();
		if (!grammarFactory.has(languageId)) {
			return null;
		}
		const encodedLanguageId = this._languageService.languageIdCodec.encodeLanguageId(languageId);
		const { grammar } = await grammarFactory.createGrammar(languageId, encodedLanguageId);
		return grammar;
	}

	// Memoized load of the vscode-oniguruma module plus its WASM payload.
	private _vscodeOniguruma: Promise<typeof import('vscode-oniguruma')> | null = null;
	private _getVSCodeOniguruma(): Promise<typeof import('vscode-oniguruma')> {
		if (!this._vscodeOniguruma) {
			this._vscodeOniguruma = (async () => {
				const [vscodeOniguruma, wasm] = await Promise.all([import('vscode-oniguruma'), this._loadVSCodeOnigurumaWASM()]);
				await vscodeOniguruma.loadWASM({
					data: wasm,
					// Debug output (startDebugMode) is routed through here.
					print: (str: string) => {
						this._debugModePrintFunc(str);
					}
				});
				return vscodeOniguruma;
			})();
		}
		return this._vscodeOniguruma;
	}

	// Fetches onig.wasm. On web we must buffer it (see comment); on desktop the
	// asar-unpacked path is used for built products.
	private async _loadVSCodeOnigurumaWASM(): Promise<Response | ArrayBuffer> {
		if (isWeb) {
			const response = await fetch(FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm').toString(true));
			// Using the response directly only works if the server sets the MIME type 'application/wasm'.
			// Otherwise, a TypeError is thrown when using the streaming compiler.
			// We therefore use the non-streaming compiler :(.
			return await response.arrayBuffer();
		} else {
			const response = await fetch(this._environmentService.isBuilt
				? FileAccess.asBrowserUri(`${nodeModulesAsarUnpackedPath}/vscode-oniguruma/release/onig.wasm`).toString(true)
				: FileAccess.asBrowserUri(`${nodeModulesPath}/vscode-oniguruma/release/onig.wasm`).toString(true));
			return response;
		}
	}
}
function toColorMap(colorMap: string[]): Color[] {
const result: Color[] = [null!];
for (let i = 1, len = colorMap.length; i < len; i++) {
result[i] = Color.fromHex(colorMap[i]);
}
return result;
}
/**
 * Structural equality for two token theming rule arrays: same length, same
 * scope per index, and matching fontStyle/foreground/background settings.
 * Null/undefined inputs, or any rule missing its settings, compare unequal.
 */
function equalsTokenRules(a: ITextMateThemingRule[] | null, b: ITextMateThemingRule[] | null): boolean {
	if (!a || !b || a.length !== b.length) {
		return false;
	}
	for (let i = a.length - 1; i >= 0; i--) {
		const ruleA = a[i];
		const ruleB = b[i];
		if (ruleA.scope !== ruleB.scope) {
			return false;
		}
		const settingsA = ruleA.settings;
		const settingsB = ruleB.settings;
		if (!settingsA || !settingsB) {
			// One side has no settings object: treat as different.
			return false;
		}
		if (settingsA.fontStyle !== settingsB.fontStyle
			|| settingsA.foreground !== settingsB.foreground
			|| settingsA.background !== settingsB.background) {
			return false;
		}
	}
	return true;
}
/**
 * Validates one `contributes.grammars` entry, reporting problems through the
 * extension's message collector. Returns true when the entry is usable.
 * A grammar path outside the extension folder only produces a warning.
 */
function validateGrammarExtensionPoint(extensionLocation: URI, syntax: ITMSyntaxExtensionPoint, collector: ExtensionMessageCollector, _languageService: ILanguageService): boolean {
	const extPoint = grammarsExtPoint.name;
	if (syntax.language && (typeof syntax.language !== 'string' || !_languageService.isRegisteredLanguageId(syntax.language))) {
		collector.error(nls.localize('invalid.language', "Unknown language in `contributes.{0}.language`. Provided value: {1}", extPoint, String(syntax.language)));
		return false;
	}
	if (!syntax.scopeName || typeof syntax.scopeName !== 'string') {
		collector.error(nls.localize('invalid.scopeName', "Expected string in `contributes.{0}.scopeName`. Provided value: {1}", extPoint, String(syntax.scopeName)));
		return false;
	}
	if (!syntax.path || typeof syntax.path !== 'string') {
		collector.error(nls.localize('invalid.path.0', "Expected string in `contributes.{0}.path`. Provided value: {1}", extPoint, String(syntax.path)));
		return false;
	}
	if (syntax.injectTo && (!Array.isArray(syntax.injectTo) || syntax.injectTo.some(scope => typeof scope !== 'string'))) {
		collector.error(nls.localize('invalid.injectTo', "Invalid value in `contributes.{0}.injectTo`. Must be an array of language scope names. Provided value: {1}", extPoint, JSON.stringify(syntax.injectTo)));
		return false;
	}
	if (syntax.embeddedLanguages && !types.isObject(syntax.embeddedLanguages)) {
		collector.error(nls.localize('invalid.embeddedLanguages', "Invalid value in `contributes.{0}.embeddedLanguages`. Must be an object map from scope name to language. Provided value: {1}", extPoint, JSON.stringify(syntax.embeddedLanguages)));
		return false;
	}
	if (syntax.tokenTypes && !types.isObject(syntax.tokenTypes)) {
		collector.error(nls.localize('invalid.tokenTypes', "Invalid value in `contributes.{0}.tokenTypes`. Must be an object map from scope name to token type. Provided value: {1}", extPoint, JSON.stringify(syntax.tokenTypes)));
		return false;
	}
	const grammarLocation = resources.joinPath(extensionLocation, syntax.path);
	if (!resources.isEqualOrParent(grammarLocation, extensionLocation)) {
		// Not fatal, but the extension may break when packaged/moved.
		collector.warn(nls.localize('invalid.path.1', "Expected `contributes.{0}.path` ({1}) to be included inside extension's folder ({2}). This might make the extension non-portable.", extPoint, grammarLocation.path, extensionLocation.path));
	}
	return true;
}

View file

@ -1,234 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { IWorkerContext } from 'vs/editor/common/services/editorSimpleWorker';
import { UriComponents, URI } from 'vs/base/common/uri';
import { LanguageId } from 'vs/editor/common/encodedTokenAttributes';
import { IValidEmbeddedLanguagesMap, IValidTokenTypeMap, IValidGrammarDefinition } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { TMGrammarFactory, ICreateGrammarResult } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IModelChangedEvent, MirrorTextModel } from 'vs/editor/common/model/mirrorTextModel';
import { TextMateWorkerHost } from 'vs/workbench/services/textMate/browser/nativeTextMateService';
import { TokenizationStateStore } from 'vs/editor/common/model/textModelTokens';
import type { IRawTheme, IOnigLib } from 'vscode-textmate';
import { ContiguousMultilineTokensBuilder } from 'vs/editor/common/tokens/contiguousMultilineTokensBuilder';
import { countEOL } from 'vs/editor/common/core/eolCounter';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { AppResourcePath, FileAccess, nodeModulesAsarPath, nodeModulesPath } from 'vs/base/common/network';
import { TMTokenization } from 'vs/workbench/services/textMate/common/TMTokenization';
// Locations of the vscode-textmate / vscode-oniguruma node modules, in both
// the plain node_modules layout and the packaged asar layout.
const textmateModuleLocation: AppResourcePath = `${nodeModulesPath}/vscode-textmate`;
const textmateModuleLocationAsar: AppResourcePath = `${nodeModulesAsarPath}/vscode-textmate`;
const onigurumaModuleLocation: AppResourcePath = `${nodeModulesPath}/vscode-oniguruma`;
const onigurumaModuleLocationAsar: AppResourcePath = `${nodeModulesAsarPath}/vscode-oniguruma`;
/**
 * Serializable (URI → UriComponents) form of IValidGrammarDefinition for
 * transfer into the worker.
 */
export interface IValidGrammarDefinitionDTO {
	location: UriComponents;
	language?: string;
	scopeName: string;
	embeddedLanguages: IValidEmbeddedLanguagesMap;
	tokenTypes: IValidTokenTypeMap;
	injectTo?: string[];
	balancedBracketSelectors: string[];
	unbalancedBracketSelectors: string[];
}

/** Data handed to the worker at creation time. */
export interface ICreateData {
	grammarDefinitions: IValidGrammarDefinitionDTO[];
}

/** Snapshot of a main-thread text model sent to the worker on first sync. */
export interface IRawModelData {
	uri: UriComponents;
	versionId: number;
	lines: string[];
	EOL: string;
	languageId: string;
	encodedLanguageId: LanguageId;
}
/**
 * Worker-side mirror of a text model. Tokenizes its lines with the textmate
 * grammar for the current language and pushes serialized tokens back to the
 * main thread via `TextMateWorker._setTokens`.
 */
class TextMateWorkerModel extends MirrorTextModel {

	// Null while no grammar is available (loading, or language has none).
	private _tokenizationStateStore: TokenizationStateStore | null;
	private readonly _worker: TextMateWorker;
	private _languageId: string;
	private _encodedLanguageId: LanguageId;
	// Guards against async grammar resolution completing after disposal.
	private _isDisposed: boolean;

	constructor(uri: URI, lines: string[], eol: string, versionId: number, worker: TextMateWorker, languageId: string, encodedLanguageId: LanguageId) {
		super(uri, lines, eol, versionId);
		this._tokenizationStateStore = null;
		this._worker = worker;
		this._languageId = languageId;
		this._encodedLanguageId = encodedLanguageId;
		this._isDisposed = false;
		this._resetTokenization();
	}

	public override dispose(): void {
		this._isDisposed = true;
		super.dispose();
	}

	// Called when the main thread changes the model's language: restart
	// tokenization with the matching grammar.
	public onLanguageId(languageId: string, encodedLanguageId: LanguageId): void {
		this._languageId = languageId;
		this._encodedLanguageId = encodedLanguageId;
		this._resetTokenization();
	}

	override onEvents(e: IModelChangedEvent): void {
		super.onEvents(e);
		if (this._tokenizationStateStore) {
			// Invalidate the cached line states affected by each edit.
			for (let i = 0; i < e.changes.length; i++) {
				const change = e.changes[i];
				const [eolCount] = countEOL(change.text);
				this._tokenizationStateStore.applyEdits(change.range, eolCount);
			}
		}
		this._ensureTokens();
	}

	// Asynchronously resolves the grammar for the current language and starts
	// tokenizing. Stale resolutions (disposed model or language changed in the
	// meantime) are discarded.
	private _resetTokenization(): void {
		this._tokenizationStateStore = null;

		const languageId = this._languageId;
		const encodedLanguageId = this._encodedLanguageId;
		this._worker.getOrCreateGrammar(languageId, encodedLanguageId).then((r) => {
			if (this._isDisposed || languageId !== this._languageId || encodedLanguageId !== this._encodedLanguageId || !r) {
				return;
			}

			if (r.grammar) {
				const tokenizationSupport = new TMTokenization(r.grammar, r.initialState, false);
				this._tokenizationStateStore = new TokenizationStateStore(tokenizationSupport, tokenizationSupport.getInitialState());
			} else {
				this._tokenizationStateStore = null;
			}
			this._ensureTokens();
		});
	}

	// Tokenizes every line from the first invalid one to the end of the model
	// and ships the result (with the model version) to the main thread.
	private _ensureTokens(): void {
		if (!this._tokenizationStateStore) {
			return;
		}
		const builder = new ContiguousMultilineTokensBuilder();
		const lineCount = this._lines.length;

		// Validate all states up to and including endLineIndex
		for (let lineIndex = this._tokenizationStateStore.invalidLineStartIndex; lineIndex < lineCount; lineIndex++) {
			const text = this._lines[lineIndex];
			const lineStartState = this._tokenizationStateStore.getBeginState(lineIndex);

			const r = this._tokenizationStateStore.tokenizationSupport.tokenizeEncoded(text, true, lineStartState!);
			LineTokens.convertToEndOffset(r.tokens, text.length);
			builder.add(lineIndex + 1, r.tokens);
			this._tokenizationStateStore.setEndState(lineCount, lineIndex, r.endState);
			lineIndex = this._tokenizationStateStore.invalidLineStartIndex - 1; // -1 because the outer loop increments it
		}

		this._worker._setTokens(this._uri, this._versionId, builder.serialize());
	}
}
/**
 * Entry point object of the textmate tokenization web worker.
 *
 * Keeps one {@link TextMateWorkerModel} per open text model (mirrored from
 * the renderer) and a grammar factory that lazily creates one grammar per
 * language. Grammar files themselves are read through the host (renderer).
 */
export class TextMateWorker {
	private readonly _host: TextMateWorkerHost;
	// Mirrored models, keyed by uri.toString().
	private readonly _models: { [uri: string]: TextMateWorkerModel };
	// Grammar promises indexed by the encoded (numeric) language id.
	private readonly _grammarCache: Promise<ICreateGrammarResult>[];
	private readonly _grammarFactory: Promise<TMGrammarFactory | null>;

	constructor(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData) {
		this._host = ctx.host;
		this._models = Object.create(null);
		this._grammarCache = [];
		// Revive the URIs that were serialized across the worker boundary.
		const grammarDefinitions = createData.grammarDefinitions.map<IValidGrammarDefinition>((def) => {
			return {
				location: URI.revive(def.location),
				language: def.language,
				scopeName: def.scopeName,
				embeddedLanguages: def.embeddedLanguages,
				tokenTypes: def.tokenTypes,
				injectTo: def.injectTo,
				balancedBracketSelectors: def.balancedBracketSelectors,
				unbalancedBracketSelectors: def.unbalancedBracketSelectors,
			};
		});
		this._grammarFactory = this._loadTMGrammarFactory(grammarDefinitions);
	}

	/**
	 * Loads vscode-textmate and vscode-oniguruma (including its WASM binary)
	 * and wires them into a TMGrammarFactory.
	 */
	private async _loadTMGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): Promise<TMGrammarFactory> {
		// TODO: asar support
		const useAsar = false; // this._environmentService.isBuilt && !isWeb
		const textmateLocation: AppResourcePath = useAsar ? textmateModuleLocation : textmateModuleLocationAsar;
		const onigurumaLocation: AppResourcePath = useAsar ? onigurumaModuleLocation : onigurumaModuleLocationAsar;
		const textmateMain: AppResourcePath = `${textmateLocation}/release/main.js`;
		const onigurumaMain: AppResourcePath = `${onigurumaLocation}/release/main.js`;
		const onigurumaWASM: AppResourcePath = `${onigurumaLocation}/release/onig.wasm`;
		const vscodeTextmate = await import(FileAccess.asBrowserUri(textmateMain).toString(true));
		const vscodeOniguruma = await import(FileAccess.asBrowserUri(onigurumaMain).toString(true));
		const response = await fetch(FileAccess.asBrowserUri(onigurumaWASM).toString(true));
		// Using the response directly only works if the server sets the MIME type 'application/wasm'.
		// Otherwise, a TypeError is thrown when using the streaming compiler.
		// We therefore use the non-streaming compiler :(.
		const bytes = await response.arrayBuffer();
		await vscodeOniguruma.loadWASM(bytes);
		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str) => vscodeOniguruma.createOnigString(str)
		});
		return new TMGrammarFactory({
			logTrace: (msg: string) => {/* console.log(msg) */ },
			logError: (msg: string, err: any) => console.error(msg, err),
			readFile: (resource: URI) => this._host.readFile(resource)
		}, grammarDefinitions, vscodeTextmate, onigLib);
	}

	/** Called by the renderer when a text model is opened. */
	public acceptNewModel(data: IRawModelData): void {
		const uri = URI.revive(data.uri);
		const key = uri.toString();
		this._models[key] = new TextMateWorkerModel(uri, data.lines, data.EOL, data.versionId, this, data.languageId, data.encodedLanguageId);
	}

	// Called by the renderer on every content change of a mirrored model.
	// NOTE(review): assumes the model was announced via acceptNewModel first — verify the protocol guarantees ordering.
	public acceptModelChanged(strURL: string, e: IModelChangedEvent): void {
		this._models[strURL].onEvents(e);
	}

	/** Called by the renderer when a mirrored model's language changes. */
	public acceptModelLanguageChanged(strURL: string, newLanguageId: string, newEncodedLanguageId: LanguageId): void {
		this._models[strURL].onLanguageId(newLanguageId, newEncodedLanguageId);
	}

	/** Called by the renderer when a text model is closed. */
	public acceptRemovedModel(strURL: string): void {
		if (this._models[strURL]) {
			this._models[strURL].dispose();
			delete this._models[strURL];
		}
	}

	/**
	 * Returns the (cached) grammar for the given language, or null when no
	 * grammar factory could be created.
	 */
	public async getOrCreateGrammar(languageId: string, encodedLanguageId: LanguageId): Promise<ICreateGrammarResult | null> {
		const grammarFactory = await this._grammarFactory;
		if (!grammarFactory) {
			return Promise.resolve(null);
		}
		if (!this._grammarCache[encodedLanguageId]) {
			this._grammarCache[encodedLanguageId] = grammarFactory.createGrammar(languageId, encodedLanguageId);
		}
		return this._grammarCache[encodedLanguageId];
	}

	/** Forwards the current theme/color map to the grammar factory. */
	public async acceptTheme(theme: IRawTheme, colorMap: string[]): Promise<void> {
		const grammarFactory = await this._grammarFactory;
		grammarFactory?.setTheme(theme, colorMap);
	}

	// Called by TextMateWorkerModel to ship computed tokens back to the renderer.
	public _setTokens(resource: URI, versionId: number, tokens: Uint8Array): void {
		this._host.setTokens(resource, versionId, tokens);
	}
}
/** Worker bootstrap entry point; invoked by the web worker infrastructure. */
export function create(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData): TextMateWorker {
	return new TextMateWorker(ctx, createData);
}

View file

@ -4,27 +4,24 @@
*--------------------------------------------------------------------------------------------*/
import { Emitter, Event } from 'vs/base/common/event';
import { IState, ITokenizationSupport, TokenizationResult, EncodedTokenizationResult } from 'vs/editor/common/languages';
import { LanguageId, TokenMetadata } from 'vs/editor/common/encodedTokenAttributes';
import type { IGrammar, StateStack } from 'vscode-textmate';
import { Disposable } from 'vs/base/common/lifecycle';
import { LanguageId, TokenMetadata } from 'vs/editor/common/encodedTokenAttributes';
import { EncodedTokenizationResult, IBackgroundTokenizationStore, IBackgroundTokenizer, IState, ITokenizationSupport, TokenizationResult } from 'vs/editor/common/languages';
import { ITextModel } from 'vs/editor/common/model';
import type { IGrammar, StateStack } from 'vscode-textmate';
export class TMTokenization extends Disposable implements ITokenizationSupport {
private readonly _grammar: IGrammar;
private readonly _containsEmbeddedLanguages: boolean;
private readonly _seenLanguages: boolean[];
private readonly _initialState: StateStack;
export class TextMateTokenizationSupport extends Disposable implements ITokenizationSupport {
private readonly _seenLanguages: boolean[] = [];
private readonly _onDidEncounterLanguage: Emitter<LanguageId> = this._register(new Emitter<LanguageId>());
public readonly onDidEncounterLanguage: Event<LanguageId> = this._onDidEncounterLanguage.event;
constructor(grammar: IGrammar, initialState: StateStack, containsEmbeddedLanguages: boolean) {
constructor(
private readonly _grammar: IGrammar,
private readonly _initialState: StateStack,
private readonly _containsEmbeddedLanguages: boolean,
private readonly _createBackgroundTokenizer?: (textModel: ITextModel, tokenStore: IBackgroundTokenizationStore) => IBackgroundTokenizer | undefined
) {
super();
this._grammar = grammar;
this._initialState = initialState;
this._containsEmbeddedLanguages = containsEmbeddedLanguages;
this._seenLanguages = [];
}
public getInitialState(): IState {
@ -35,6 +32,13 @@ export class TMTokenization extends Disposable implements ITokenizationSupport {
throw new Error('Not supported!');
}
public createBackgroundTokenizer(textModel: ITextModel, store: IBackgroundTokenizationStore): IBackgroundTokenizer | undefined {
if (this._createBackgroundTokenizer) {
return this._createBackgroundTokenizer(textModel, store);
}
return undefined;
}
public tokenizeEncoded(line: string, hasEOL: boolean, state: StateStack): EncodedTokenizationResult {
const textMateResult = this._grammar.tokenizeLine2(line, state, 500);
@ -66,7 +70,6 @@ export class TMTokenization extends Disposable implements ITokenizationSupport {
endState = state;
} else {
endState = textMateResult.ruleStack;
}
return new EncodedTokenizationResult(textMateResult.tokens, endState);

View file

@ -0,0 +1,57 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { LanguageId } from 'vs/editor/common/encodedTokenAttributes';
import { EncodedTokenizationResult, IBackgroundTokenizationStore, IBackgroundTokenizer, IState, ITokenizationSupport, TokenizationResult } from 'vs/editor/common/languages';
import { nullTokenizeEncoded } from 'vs/editor/common/languages/nullTokenize';
import { ITextModel } from 'vs/editor/common/model';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
/**
 * Wraps another {@link ITokenizationSupport} and skips tokenization of lines
 * whose length reaches `editor.maxTokenizationLineLength` (tracked live from
 * the configuration service), returning null tokens for them instead.
 */
export class TokenizationSupportWithLineLimit implements ITokenizationSupport {
	private _maxTokenizationLineLength: number;
	// Subscription to configuration changes. The original code dropped this
	// disposable, leaking the listener; keep it so dispose() can remove it.
	private readonly _configListener: { dispose(): void };

	constructor(
		private readonly _languageId: string,
		private readonly _encodedLanguageId: LanguageId,
		private readonly _actual: ITokenizationSupport,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
	) {
		this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
			overrideIdentifier: this._languageId
		});
		this._configListener = this._configurationService.onDidChangeConfiguration(e => {
			if (e.affectsConfiguration('editor.maxTokenizationLineLength')) {
				this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength', {
					overrideIdentifier: this._languageId
				});
			}
		});
	}

	/** Stops listening to configuration changes (fixes a listener leak). */
	dispose(): void {
		this._configListener.dispose();
	}

	getInitialState(): IState {
		return this._actual.getInitialState();
	}

	tokenize(line: string, hasEOL: boolean, state: IState): TokenizationResult {
		// Only the encoded variant is supported, mirroring the wrapped support.
		throw new Error('Not supported!');
	}

	tokenizeEncoded(line: string, hasEOL: boolean, state: IState): EncodedTokenizationResult {
		// Do not attempt to tokenize if a line is too long
		if (line.length >= this._maxTokenizationLineLength) {
			return nullTokenizeEncoded(this._encodedLanguageId, state);
		}
		return this._actual.tokenizeEncoded(line, hasEOL, state);
	}

	createBackgroundTokenizer(textModel: ITextModel, store: IBackgroundTokenizationStore): IBackgroundTokenizer | undefined {
		// Delegate only if the wrapped support offers background tokenization.
		if (this._actual.createBackgroundTokenizer) {
			return this._actual.createBackgroundTokenizer(textModel, store);
		} else {
			return undefined;
		}
	}
}

View file

@ -0,0 +1,147 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI, UriComponents } from 'vs/base/common/uri';
import { LanguageId } from 'vs/editor/common/encodedTokenAttributes';
import { IModelChangedEvent } from 'vs/editor/common/model/mirrorTextModel';
import { IWorkerContext } from 'vs/editor/common/services/editorSimpleWorker';
import type { StateDeltas, TextMateWorkerHost } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerHost';
import { ICreateGrammarResult, TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IValidEmbeddedLanguagesMap, IValidGrammarDefinition, IValidTokenTypeMap } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { IOnigLib, IRawTheme } from 'vscode-textmate';
import { TextMateWorkerModel } from './textMateWorkerModel';
/** Data passed from the renderer to the worker when it is created. */
export interface ICreateData {
	grammarDefinitions: IValidGrammarDefinitionDTO[];
	// Browser URIs resolved by the renderer; the worker imports/fetches them.
	textmateMainUri: string;
	onigurumaMainUri: string;
	onigurumaWASMUri: string;
}
/**
 * Serializable counterpart of IValidGrammarDefinition: URIs are transferred
 * as UriComponents across the worker boundary and revived on the other side.
 */
export interface IValidGrammarDefinitionDTO {
	location: UriComponents;
	language?: string;
	scopeName: string;
	embeddedLanguages: IValidEmbeddedLanguagesMap;
	tokenTypes: IValidTokenTypeMap;
	injectTo?: string[];
	balancedBracketSelectors: string[];
	unbalancedBracketSelectors: string[];
}
/**
 * Entry point object of the textmate tokenization web worker.
 *
 * Keeps one {@link TextMateWorkerModel} per open text model (mirrored from
 * the renderer) and a grammar factory that lazily creates one grammar per
 * language. Module/WASM locations are provided by the renderer in createData.
 */
export class TextMateTokenizationWorker {
	private readonly _host: TextMateWorkerHost;
	// Mirrored models, keyed by uri.toString().
	private readonly _models: { [uri: string]: TextMateWorkerModel } = Object.create(null);
	// Grammar promises indexed by the encoded (numeric) language id.
	private readonly _grammarCache: Promise<ICreateGrammarResult>[] = [];
	private readonly _grammarFactory: Promise<TMGrammarFactory | null>;

	constructor(ctx: IWorkerContext<TextMateWorkerHost>, private readonly createData: ICreateData) {
		this._host = ctx.host;
		// Revive the URIs that were serialized across the worker boundary.
		const grammarDefinitions = createData.grammarDefinitions.map<IValidGrammarDefinition>((def) => {
			return {
				location: URI.revive(def.location),
				language: def.language,
				scopeName: def.scopeName,
				embeddedLanguages: def.embeddedLanguages,
				tokenTypes: def.tokenTypes,
				injectTo: def.injectTo,
				balancedBracketSelectors: def.balancedBracketSelectors,
				unbalancedBracketSelectors: def.unbalancedBracketSelectors,
			};
		});
		this._grammarFactory = this._loadTMGrammarFactory(grammarDefinitions);
	}

	/**
	 * Loads vscode-textmate and vscode-oniguruma (including its WASM binary)
	 * from the URIs supplied in createData and wires them into a
	 * TMGrammarFactory that reads grammar files through the host.
	 */
	private async _loadTMGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): Promise<TMGrammarFactory> {
		const uri = this.createData.textmateMainUri;
		const vscodeTextmate = await import(uri);
		const vscodeOniguruma = await import(this.createData.onigurumaMainUri);
		const response = await fetch(this.createData.onigurumaWASMUri);
		// Using the response directly only works if the server sets the MIME type 'application/wasm'.
		// Otherwise, a TypeError is thrown when using the streaming compiler.
		// We therefore use the non-streaming compiler :(.
		const bytes = await response.arrayBuffer();
		await vscodeOniguruma.loadWASM(bytes);
		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str) => vscodeOniguruma.createOnigString(str)
		});
		return new TMGrammarFactory({
			logTrace: (msg: string) => {/* console.log(msg) */ },
			logError: (msg: string, err: any) => console.error(msg, err),
			readFile: (resource: URI) => this._host.readFile(resource)
		}, grammarDefinitions, vscodeTextmate, onigLib);
	}

	// #region called by renderer

	/** Mirrors a newly opened text model into the worker. */
	public acceptNewModel(data: IRawModelData): void {
		const uri = URI.revive(data.uri);
		const key = uri.toString();
		this._models[key] = new TextMateWorkerModel(uri, data.lines, data.EOL, data.versionId, this, data.languageId, data.encodedLanguageId);
	}

	// NOTE(review): assumes the model was announced via acceptNewModel first — verify the protocol guarantees ordering.
	public acceptModelChanged(strURL: string, e: IModelChangedEvent): void {
		this._models[strURL].onEvents(e);
	}

	/** Forces re-tokenization of the given (exclusive-end) line range. */
	public retokenize(strURL: string, startLineNumber: number, endLineNumberExclusive: number): void {
		this._models[strURL].retokenize(startLineNumber, endLineNumberExclusive);
	}

	public acceptModelLanguageChanged(strURL: string, newLanguageId: string, newEncodedLanguageId: LanguageId): void {
		this._models[strURL].onLanguageId(newLanguageId, newEncodedLanguageId);
	}

	public acceptRemovedModel(strURL: string): void {
		if (this._models[strURL]) {
			this._models[strURL].dispose();
			delete this._models[strURL];
		}
	}

	/** Forwards the current theme/color map to the grammar factory. */
	public async acceptTheme(theme: IRawTheme, colorMap: string[]): Promise<void> {
		const grammarFactory = await this._grammarFactory;
		grammarFactory?.setTheme(theme, colorMap);
	}

	// #endregion

	// #region called by worker model

	/**
	 * Returns the (cached) grammar for the given language, or null when no
	 * grammar factory could be created.
	 */
	public async getOrCreateGrammar(languageId: string, encodedLanguageId: LanguageId): Promise<ICreateGrammarResult | null> {
		const grammarFactory = await this._grammarFactory;
		if (!grammarFactory) {
			return Promise.resolve(null);
		}
		if (!this._grammarCache[encodedLanguageId]) {
			this._grammarCache[encodedLanguageId] = grammarFactory.createGrammar(languageId, encodedLanguageId);
		}
		return this._grammarCache[encodedLanguageId];
	}

	// Ships computed tokens plus line-end state deltas back to the renderer.
	public setTokensAndStates(resource: URI, versionId: number, tokens: Uint8Array, stateDeltas: StateDeltas[]): void {
		this._host.setTokensAndStates(resource, versionId, tokens, stateDeltas);
	}

	// #endregion
}
/** Snapshot of a renderer-side text model, sent when the model is opened. */
export interface IRawModelData {
	uri: UriComponents;
	versionId: number;
	lines: string[];
	EOL: string;
	languageId: string;
	encodedLanguageId: LanguageId;
}
/** Worker bootstrap entry point; invoked by the web worker infrastructure. */
export function create(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData): TextMateTokenizationWorker {
	return new TextMateTokenizationWorker(ctx, createData);
}

View file

@ -0,0 +1,161 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import { LanguageId } from 'vs/editor/common/encodedTokenAttributes';
import { IModelChangedEvent, MirrorTextModel } from 'vs/editor/common/model/mirrorTextModel';
import { TokenizationStateStore } from 'vs/editor/common/model/textModelTokens';
import { diffStateStacksRefEq, StateStack, StackDiff } from 'vscode-textmate';
import { ContiguousMultilineTokensBuilder } from 'vs/editor/common/tokens/contiguousMultilineTokensBuilder';
import { countEOL } from 'vs/editor/common/core/eolCounter';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { TextMateTokenizationSupport } from 'vs/workbench/services/textMate/browser/tokenizationSupport/textMateTokenizationSupport';
import { StateDeltas } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerHost';
import { RunOnceScheduler } from 'vs/base/common/async';
import { TextMateTokenizationWorker } from './textMate.worker';
/**
 * Worker-side mirror of a renderer text model.
 *
 * Tokenizes the mirrored content in time-sliced batches and ships tokens plus
 * line-end state deltas back to the renderer via the worker.
 */
export class TextMateWorkerModel extends MirrorTextModel {
	private _tokenizationStateStore: TokenizationStateStore | null;
	private readonly _worker: TextMateTokenizationWorker;
	private _languageId: string;
	private _encodedLanguageId: LanguageId;
	private _isDisposed: boolean;

	constructor(uri: URI, lines: string[], eol: string, versionId: number, worker: TextMateTokenizationWorker, languageId: string, encodedLanguageId: LanguageId) {
		super(uri, lines, eol, versionId);
		this._tokenizationStateStore = null;
		this._worker = worker;
		this._languageId = languageId;
		this._encodedLanguageId = encodedLanguageId;
		this._isDisposed = false;
		this._resetTokenization();
	}

	public override dispose(): void {
		// Mark disposed first so a pending grammar promise bails out.
		this._isDisposed = true;
		super.dispose();
	}

	/** Called when the renderer changes the model's language. */
	public onLanguageId(languageId: string, encodedLanguageId: LanguageId): void {
		this._languageId = languageId;
		this._encodedLanguageId = encodedLanguageId;
		this._resetTokenization();
	}

	// Coalesces bursts of change events into one tokenization pass (10ms).
	private readonly tokenizeDebouncer = new RunOnceScheduler(() => this._tokenize(), 10);

	override onEvents(e: IModelChangedEvent): void {
		super.onEvents(e);
		if (this._tokenizationStateStore) {
			// Apply edits in reverse so earlier ranges stay valid.
			for (let i = e.changes.length - 1; i >= 0; i--) {
				const change = e.changes[i];
				const [eolCount] = countEOL(change.text);
				this._tokenizationStateStore.applyEdits(change.range, eolCount);
			}
		}
		this.tokenizeDebouncer.schedule();
	}

	/** Invalidates the given (exclusive-end, 1-based) line range and re-tokenizes. */
	public retokenize(startLineNumber: number, endLineNumberExclusive: number) {
		if (this._tokenizationStateStore) {
			for (let lineNumber = startLineNumber; lineNumber < endLineNumberExclusive; lineNumber++) {
				// The state store uses 0-based line indices.
				this._tokenizationStateStore.markMustBeTokenized(lineNumber - 1);
			}
			this.tokenizeDebouncer.schedule();
		}
	}

	/**
	 * Drops the current tokenization state and asynchronously rebuilds it
	 * from the grammar of the current language.
	 */
	private _resetTokenization(): void {
		this._tokenizationStateStore = null;

		// Capture the language at request time so a stale grammar response
		// (model disposed or language changed meanwhile) can be discarded.
		const languageId = this._languageId;
		const encodedLanguageId = this._encodedLanguageId;
		this._worker.getOrCreateGrammar(languageId, encodedLanguageId).then((r) => {
			if (this._isDisposed || languageId !== this._languageId || encodedLanguageId !== this._encodedLanguageId || !r) {
				return;
			}

			if (r.grammar) {
				const tokenizationSupport = new TextMateTokenizationSupport(r.grammar, r.initialState, false);
				this._tokenizationStateStore = new TokenizationStateStore(tokenizationSupport, tokenizationSupport.getInitialState());
			} else {
				this._tokenizationStateStore = null;
			}
			this._tokenize();
		});
	}

	/**
	 * Tokenizes invalid lines in batches (max 200 lines per message) and sends
	 * tokens + state deltas to the renderer. Yields via setTimeout after ~20ms
	 * so incoming change events can be processed between slices.
	 */
	private _tokenize(): void {
		if (!this._tokenizationStateStore) {
			return;
		}

		const startTime = new Date().getTime();

		while (true) {
			const builder = new ContiguousMultilineTokensBuilder();
			const lineCount = this._lines.length;

			let tokenizedLines = 0;

			const stateDeltaBuilder = new StateDeltaBuilder();

			// Validate all states up to and including endLineIndex
			while (this._tokenizationStateStore.invalidLineStartIndex < lineCount) {
				const lineIndex = this._tokenizationStateStore.invalidLineStartIndex;

				// NOTE(review): the counter is incremented before the break, so it
				// overcounts by one when the batch limit is hit — harmless, as it is
				// only compared against 0 below.
				tokenizedLines++;
				// TODO don't spam the renderer
				if (tokenizedLines > 200) {
					break;
				}

				const text = this._lines[lineIndex];

				const lineStartState = this._tokenizationStateStore.getBeginState(lineIndex) as StateStack;
				const tokenizeResult = this._tokenizationStateStore.tokenizationSupport.tokenizeEncoded(text, true, lineStartState);
				if (this._tokenizationStateStore.setEndState(lineCount, lineIndex, tokenizeResult.endState)) {
					// Only send a delta when the end state actually changed.
					const delta = diffStateStacksRefEq(lineStartState, tokenizeResult.endState as StateStack);
					stateDeltaBuilder.setState(lineIndex + 1, delta);
				}

				LineTokens.convertToEndOffset(tokenizeResult.tokens, text.length);
				builder.add(lineIndex + 1, tokenizeResult.tokens);
			}

			if (tokenizedLines === 0) {
				break;
			}

			const stateDeltas = stateDeltaBuilder.getStateDeltas();
			this._worker.setTokensAndStates(this._uri, this._versionId, builder.serialize(), stateDeltas);

			const deltaMs = new Date().getTime() - startTime;
			if (deltaMs > 20) {
				// yield to check for changes
				setTimeout(() => this._tokenize(), 3);
				break;
			}
		}
	}
}
/**
 * Accumulates per-line state-stack diffs, merging diffs for consecutive line
 * numbers into a single run that records only the first line number.
 */
class StateDeltaBuilder {
	private _previousLineNumber: number = -1;
	private _runs: StateDeltas[] = [];

	public setState(lineNumber: number, stackDiff: StackDiff): void {
		const extendsCurrentRun = lineNumber === this._previousLineNumber + 1;
		if (!extendsCurrentRun) {
			// Non-consecutive line: open a new run.
			this._runs.push({ startLineNumber: lineNumber, stateDeltas: [stackDiff] });
		} else {
			this._runs[this._runs.length - 1].stateDeltas.push(stackDiff);
		}
		this._previousLineNumber = lineNumber;
	}

	public getStateDeltas(): StateDeltas[] {
		return this._runs;
	}
}

View file

@ -0,0 +1,208 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { BugIndicatingError } from 'vs/base/common/errors';
import { DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { AppResourcePath, FileAccess, nodeModulesAsarPath, nodeModulesPath } from 'vs/base/common/network';
import { URI, UriComponents } from 'vs/base/common/uri';
import { createWebWorker, MonacoWebWorker } from 'vs/editor/browser/services/webWorker';
import { IBackgroundTokenizationStore, IBackgroundTokenizer } from 'vs/editor/common/languages';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { ILanguageConfigurationService } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { ITextModel } from 'vs/editor/common/model';
import { IModelService } from 'vs/editor/common/services/model';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IExtensionResourceLoaderService } from 'vs/platform/extensionResourceLoader/common/extensionResourceLoader';
import { ICreateData, TextMateTokenizationWorker } from 'vs/workbench/services/textMate/browser/worker/textMate.worker';
import { TextMateWorkerTokenizerController } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerTokenizerController';
import { IValidGrammarDefinition } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { INITIAL, IRawTheme, StackDiff } from 'vscode-textmate';
/**
 * Renderer-side host of the textmate tokenization web worker.
 *
 * Owns the worker lifecycle, forwards theme information to it, and creates
 * per-model background tokenizers that are driven by the worker. Also serves
 * as the worker's host object (see the "called by the worker" region).
 */
export class TextMateWorkerHost implements IDisposable {
	private _workerProxyPromise: Promise<TextMateTokenizationWorker | null> | null = null;
	private _worker: MonacoWebWorker<TextMateTokenizationWorker> | null = null;
	private _workerProxy: TextMateTokenizationWorker | null = null;
	// One controller per attached model, keyed by the model URI.
	private readonly _workerTokenizerControllers = new Map</* uri.toString() */ string, TextMateWorkerTokenizerController>();

	private _currentTheme: IRawTheme | null = null;
	private _currentTokenColorMap: string[] | null = null;
	private _grammarDefinitions: IValidGrammarDefinition[] = [];

	constructor(
		@IExtensionResourceLoaderService private readonly _extensionResourceLoaderService: IExtensionResourceLoaderService,
		@IModelService private readonly _modelService: IModelService,
		@ILanguageConfigurationService private readonly _languageConfigurationService: ILanguageConfigurationService,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
		@ILanguageService private readonly _languageService: ILanguageService,
	) {
	}

	/**
	 * Replaces the set of known grammars. The worker is restarted because it
	 * receives the grammar definitions only at creation time.
	 */
	public setGrammarDefinitions(grammarDefinitions: IValidGrammarDefinition[]): void {
		this._grammarDefinitions = grammarDefinitions;
		this._killWorker();
	}

	dispose(): void {
		this._killWorker();
	}

	/** Remembers the theme and pushes it to the worker, if one is running. */
	public acceptTheme(theme: IRawTheme, colorMap: string[]): void {
		this._currentTheme = theme;
		this._currentTokenColorMap = colorMap;
		if (this._currentTheme && this._currentTokenColorMap && this._workerProxy) {
			this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
		}
	}

	/** Lazily creates the worker; concurrent callers share one promise. */
	private getWorkerProxy(): Promise<TextMateTokenizationWorker | null> {
		if (!this._workerProxyPromise) {
			this._workerProxyPromise = this.createWorkerProxy();
		}
		return this._workerProxyPromise;
	}

	private async createWorkerProxy(): Promise<TextMateTokenizationWorker | null> {
		// TODO: asar support — use `${nodeModulesAsarPath}/...` when built and not running in the web.
		// (The original code kept both variants behind always-true ternaries; the asar branches were dead.)
		const textmateLocation: AppResourcePath = `${nodeModulesPath}/vscode-textmate`;
		const onigurumaLocation: AppResourcePath = `${nodeModulesPath}/vscode-oniguruma`;
		const textmateMain: AppResourcePath = `${textmateLocation}/release/main.js`;
		const onigurumaMain: AppResourcePath = `${onigurumaLocation}/release/main.js`;
		const onigurumaWASM: AppResourcePath = `${onigurumaLocation}/release/onig.wasm`;
		// Resolve module/WASM locations here so the worker does not need FileAccess.
		const createData: ICreateData = {
			grammarDefinitions: this._grammarDefinitions,
			textmateMainUri: FileAccess.asBrowserUri(textmateMain).toString(true),
			onigurumaMainUri: FileAccess.asBrowserUri(onigurumaMain).toString(true),
			onigurumaWASMUri: FileAccess.asBrowserUri(onigurumaWASM).toString(true),
		};
		const worker = createWebWorker<TextMateTokenizationWorker>(this._modelService, this._languageConfigurationService, {
			createData,
			label: 'textMateWorker',
			moduleId: 'vs/workbench/services/textMate/browser/worker/textMate.worker',
			host: this,
		});
		this._worker = worker;
		const proxy = await worker.getProxy();
		if (this._worker !== worker) {
			// The worker was killed (or replaced) while we waited for the proxy.
			return null;
		}
		this._workerProxy = proxy;
		if (this._currentTheme && this._currentTokenColorMap) {
			this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
		}
		return proxy;
	}

	private _killWorker(): void {
		for (const controller of this._workerTokenizerControllers.values()) {
			controller.dispose();
		}
		this._workerTokenizerControllers.clear();

		if (this._worker) {
			this._worker.dispose();
			this._worker = null;
		}
		this._workerProxy = null;
		this._workerProxyPromise = null;
	}

	// Will be recreated when worker is killed (because tokenizer is re-registered when languages change)
	/**
	 * Creates a worker-backed background tokenizer for the model, or undefined
	 * when async tokenization is disabled or the model is too large to sync.
	 */
	public createBackgroundTokenizer(textModel: ITextModel, tokenStore: IBackgroundTokenizationStore): IBackgroundTokenizer | undefined {
		if (this._workerTokenizerControllers.has(textModel.uri.toString())) {
			throw new BugIndicatingError();
		}

		const shouldTokenizeAsync = this._configurationService.getValue<boolean>('editor.experimental.asyncTokenization');
		if (shouldTokenizeAsync !== true) {
			return undefined;
		}
		if (textModel.isTooLargeForSyncing()) {
			// fallback to default sync background tokenizer
			return undefined;
		}

		const store = new DisposableStore();
		this.getWorkerProxy().then((workerProxy) => {
			if (store.isDisposed || !workerProxy) {
				return;
			}
			// Only keep a controller (and mirror the model in the worker) while
			// the model is attached to an editor.
			store.add(keepAliveWhenAttached(textModel, () => {
				const controller = new TextMateWorkerTokenizerController(textModel, workerProxy, this._languageService.languageIdCodec, tokenStore, INITIAL);
				this._workerTokenizerControllers.set(textModel.uri.toString(), controller);
				return toDisposable(() => {
					this._workerTokenizerControllers.delete(textModel.uri.toString());
					controller.dispose();
				});
			}));
		});
		return {
			dispose() {
				store.dispose();
			},
			requestTokens: (startLineNumber, endLineNumberExclusive) => {
				this.getWorkerProxy().then((workerProxy) => {
					workerProxy?.retokenize(textModel.uri.toString(), startLineNumber, endLineNumberExclusive);
				});
			},
		};
	}

	// #region called by the worker

	/** Reads a grammar file on behalf of the worker. */
	async readFile(_resource: UriComponents): Promise<string> {
		const resource = URI.revive(_resource);
		return this._extensionResourceLoaderService.readExtensionResource(resource);
	}

	/** Receives tokens + line-end state deltas computed by the worker. */
	async setTokensAndStates(_resource: UriComponents, versionId: number, tokens: Uint8Array, lineEndStateDeltas: StateDeltas[]): Promise<void> {
		const resource = URI.revive(_resource);
		const controller = this._workerTokenizerControllers.get(resource.toString());
		if (controller) {
			// When a model detaches, it is removed synchronously from the map.
			// However, the worker might still be sending tokens for that model.
			controller.setTokensAndStates(versionId, tokens, lineEndStateDeltas);
		}
	}

	// #endregion
}
/**
 * A run of line-end state diffs for consecutive lines:
 * stateDeltas[i] is the diff for line startLineNumber + i.
 */
export interface StateDeltas {
	startLineNumber: number;
	stateDeltas: StackDiff[];
}
function keepAliveWhenAttached(textModel: ITextModel, factory: () => IDisposable): IDisposable {
const disposableStore = new DisposableStore();
const subStore = disposableStore.add(new DisposableStore());
function checkAttached() {
if (textModel.isAttachedToEditor()) {
subStore.add(factory());
} else {
subStore.clear();
}
}
checkAttached();
disposableStore.add(textModel.onDidChangeAttached(() => {
checkAttached();
}));
return disposableStore;
}

View file

@ -0,0 +1,155 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Disposable } from 'vs/base/common/lifecycle';
import { countEOL } from 'vs/editor/common/core/eolCounter';
import { IBackgroundTokenizationStore, ILanguageIdCodec } from 'vs/editor/common/languages';
import { ITextModel } from 'vs/editor/common/model';
import { ContiguousGrowingArray } from 'vs/editor/common/model/textModelTokens';
import { IModelContentChange, IModelContentChangedEvent } from 'vs/editor/common/textModelEvents';
import { ContiguousMultilineTokensBuilder } from 'vs/editor/common/tokens/contiguousMultilineTokensBuilder';
import { ArrayEdit, MonotonousIndexTransformer, SingleArrayEdit } from 'vs/workbench/services/textMate/browser/arrayOperation';
import { TextMateTokenizationWorker } from 'vs/workbench/services/textMate/browser/worker/textMate.worker';
import type { StateDeltas } from 'vs/workbench/services/textMate/browser/workerHost/textMateWorkerHost';
import { applyStateStackDiff, StateStack } from 'vscode-textmate';
/**
 * Renderer-side controller for one worker-tokenized model.
 *
 * Mirrors the model into the worker, forwards content/language changes, and
 * translates the (possibly outdated) tokens/state-deltas coming back from the
 * worker into the coordinate space of the current model version.
 */
export class TextMateWorkerTokenizerController extends Disposable {
	// Content changes that were sent to the worker but whose results have not
	// been received yet (oldest first).
	private _pendingChanges: IModelContentChangedEvent[] = [];

	/**
	 * These states will eventually equal the worker states.
	 * _states[i] stores the state at the end of line number i+1.
	 */
	private readonly _states = new ContiguousGrowingArray<StateStack | null>(null);

	constructor(
		private readonly _model: ITextModel,
		private readonly _worker: TextMateTokenizationWorker,
		private readonly _languageIdCodec: ILanguageIdCodec,
		private readonly _backgroundTokenizationStore: IBackgroundTokenizationStore,
		private readonly _initialState: StateStack,
	) {
		super();

		this._register(this._model.onDidChangeContent((e) => {
			this._worker.acceptModelChanged(this._model.uri.toString(), e);
			this._pendingChanges.push(e);
		}));

		this._register(this._model.onDidChangeLanguage((e) => {
			const languageId = this._model.getLanguageId();
			const encodedLanguageId =
			this._languageIdCodec.encodeLanguageId(languageId);
			this._worker.acceptModelLanguageChanged(
				this._model.uri.toString(),
				languageId,
				encodedLanguageId
			);
		}));

		// Mirror the full model content into the worker.
		const languageId = this._model.getLanguageId();
		const encodedLanguageId = this._languageIdCodec.encodeLanguageId(languageId);
		this._worker.acceptNewModel({
			uri: this._model.uri,
			versionId: this._model.getVersionId(),
			lines: this._model.getLinesContent(),
			EOL: this._model.getEOL(),
			languageId,
			encodedLanguageId,
		});
	}

	public override dispose(): void {
		super.dispose();
		this._worker.acceptRemovedModel(this._model.uri.toString());
	}

	/**
	 * This method is called from the worker through the worker host.
	 *
	 * The worker computed tokens for `versionId`, which may be older than the
	 * current model version; everything is transformed forward through the
	 * still-pending changes before being handed to the token store.
	 */
	public setTokensAndStates(versionId: number, rawTokens: ArrayBuffer, stateDeltas: StateDeltas[]): void {
		// _states state, change{k}, ..., change{versionId}, state delta base, change{j}, ..., change{m}, current renderer state
		//                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^                   ^^^^^^^^^^^^^^^^^^^^^^^^^
		//                | past changes                                      | future states

		// Apply past changes to _states
		while (
			this._pendingChanges.length > 0 &&
			this._pendingChanges[0].versionId <= versionId
		) {
			const change = this._pendingChanges.shift()!;
			const op = lineArrayEditFromModelContentChange(change.changes);
			op.applyTo(this._states);
		}

		// Maps line indices of `versionId` to line indices of the current version.
		const curToFutureTransformer1 = MonotonousIndexTransformer.fromMany(
			this._pendingChanges.map((c) => lineArrayEditFromModelContentChange(c.changes))
		);

		const tokens = ContiguousMultilineTokensBuilder.deserialize(
			new Uint8Array(rawTokens)
		);

		// Apply future changes to tokens
		for (const change of this._pendingChanges) {
			for (const innerChanges of change.changes) {
				for (let j = 0; j < tokens.length; j++) {
					tokens[j].applyEdit(innerChanges.range, innerChanges.text);
				}
			}
		}

		// Filter tokens in lines that got changed in the future to prevent flickering
		// These tokens are recomputed anyway.
		const b = new ContiguousMultilineTokensBuilder();
		for (const t of tokens) {
			for (let i = t.startLineNumber; i <= t.endLineNumber; i++) {
				const result = curToFutureTransformer1.transform(i - 1);
				if (result !== undefined) {
					b.add(i, t.getLineTokens(i) as Uint32Array);
				}
			}
		}
		this._backgroundTokenizationStore.setTokens(b.finalize());

		// NOTE(review): this recomputes the same transformer as above — looks redundant; confirm before merging them.
		const curToFutureTransformer = MonotonousIndexTransformer.fromMany(
			this._pendingChanges.map((c) => lineArrayEditFromModelContentChange(c.changes))
		);

		// Apply state deltas to _states and _backgroundTokenizationStore
		for (const d of stateDeltas) {
			// State before the first line of the run (initial state for line 1).
			let prevState = d.startLineNumber <= 1 ? this._initialState : this._states.get(d.startLineNumber - 1 - 1);
			for (let i = 0; i < d.stateDeltas.length; i++) {
				const delta = d.stateDeltas[i];
				const state = applyStateStackDiff(prevState, delta)!;
				this._states.set(d.startLineNumber + i - 1, state);

				const offset = curToFutureTransformer.transform(d.startLineNumber + i - 1);
				if (offset !== undefined) {
					this._backgroundTokenizationStore.setEndState(offset + 1, state);
				}

				// NOTE(review): this can fire more than once (condition stays true for trailing lines) — confirm the store tolerates repeated calls.
				if (d.startLineNumber + i >= this._model.getLineCount() - 1) {
					this._backgroundTokenizationStore.backgroundTokenizationFinished();
				}

				prevState = state;
			}
		}
	}
}
/**
 * Converts model content changes into the equivalent edit on a per-line
 * array (one entry per line), e.g. the `_states` array above.
 */
function lineArrayEditFromModelContentChange(c: IModelContentChange[]): ArrayEdit {
	const edits: SingleArrayEdit[] = [];
	for (const change of c) {
		const startLineIndex = change.range.startLineNumber - 1;
		// Number of old lines whose boundaries are removed by the change.
		const deletedLineCount = change.range.endLineNumber - change.range.startLineNumber;
		// Number of line breaks in the inserted text.
		const insertedLineCount = countEOL(change.text)[0];
		edits.push(new SingleArrayEdit(startLineIndex, deletedLineCount, insertedLineCount));
	}
	return new ArrayEdit(edits);
}

View file

@ -37,7 +37,7 @@ export class TMGrammarFactory extends Disposable {
super();
this._host = host;
this._initialState = vscodeTextmate.INITIAL;
this._scopeRegistry = this._register(new TMScopeRegistry());
this._scopeRegistry = new TMScopeRegistry();
this._injections = {};
this._injectedEmbeddedLanguages = {};
this._languageToScope = new Map<string, string>();

View file

@ -5,7 +5,6 @@
import * as resources from 'vs/base/common/resources';
import { URI } from 'vs/base/common/uri';
import { Disposable } from 'vs/base/common/lifecycle';
import { LanguageId, StandardTokenType } from 'vs/editor/common/encodedTokenAttributes';
export interface IValidGrammarDefinition {
@ -27,12 +26,11 @@ export interface IValidEmbeddedLanguagesMap {
[scopeName: string]: LanguageId;
}
export class TMScopeRegistry extends Disposable {
export class TMScopeRegistry {
private _scopeNameToLanguageRegistration: { [scopeName: string]: IValidGrammarDefinition };
constructor() {
super();
this._scopeNameToLanguageRegistration = Object.create(null);
}

View file

@ -100,6 +100,7 @@ import 'vs/workbench/services/assignment/common/assignmentService';
import 'vs/workbench/services/outline/browser/outlineService';
import 'vs/workbench/services/languageDetection/browser/languageDetectionWorkerServiceImpl';
import 'vs/editor/common/services/languageFeaturesService';
import 'vs/workbench/services/textMate/browser/textMateTokenizationFeature.contribution';
import { InstantiationType, registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { ExtensionGalleryService } from 'vs/platform/extensionManagement/common/extensionGalleryService';

View file

@ -37,7 +37,6 @@ import 'vs/workbench/electron-sandbox/parts/dialogs/dialog.contribution';
import 'vs/workbench/services/textfile/electron-sandbox/nativeTextFileService';
import 'vs/workbench/services/dialogs/electron-sandbox/fileDialogService';
import 'vs/workbench/services/workspaces/electron-sandbox/workspacesService';
import 'vs/workbench/services/textMate/browser/nativeTextMateService';
import 'vs/workbench/services/menubar/electron-sandbox/menubarService';
import 'vs/workbench/services/issue/electron-sandbox/issueService';
import 'vs/workbench/services/update/electron-sandbox/updateService';

View file

@ -35,7 +35,6 @@ import 'vs/workbench/browser/web.main';
//#region --- workbench services
import 'vs/workbench/services/integrity/browser/integrityService';
import 'vs/workbench/services/textMate/browser/browserTextMateService';
import 'vs/workbench/services/search/browser/searchService';
import 'vs/workbench/services/textfile/browser/browserTextFileService';
import 'vs/workbench/services/keybinding/browser/keyboardLayoutService';