Merge pull request #146552 from microsoft/hediet/tokenization

Moves tokenization logic from text model to its own text model part.
This commit is contained in:
Alexandru Dima 2022-04-06 10:12:42 +02:00 committed by GitHub
commit 65ac7bde2a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
50 changed files with 922 additions and 653 deletions

View file

@ -2333,7 +2333,7 @@ class InlineViewZonesComputer extends ViewZonesComputer {
let viewLineCounts: number[] | null = null;
for (let lineNumber = lineChange.originalStartLineNumber; lineNumber <= lineChange.originalEndLineNumber; lineNumber++) {
const lineIndex = lineNumber - lineChange.originalStartLineNumber;
const lineTokens = this._originalModel.getLineTokens(lineNumber);
const lineTokens = this._originalModel.tokenization.getLineTokens(lineNumber);
const lineContent = lineTokens.getLineContent();
const lineBreakData = lineBreaks[lineBreakIndex++];
const actualDecorations = LineDecoration.filter(decorations, lineNumber, 1, lineContent.length + 1);

View file

@ -146,7 +146,7 @@ export class ShiftCommand implements ICommand {
if (contentStartVisibleColumn % indentSize !== 0) {
// The current line is "misaligned", so let's see if this is expected...
// This can only happen when it has trailing commas in the indent
if (model.isCheapToTokenize(lineNumber - 1)) {
if (model.tokenization.isCheapToTokenize(lineNumber - 1)) {
const enterAction = getEnterAction(this._opts.autoIndent, model, new Range(lineNumber - 1, model.getLineMaxColumn(lineNumber - 1), lineNumber - 1, model.getLineMaxColumn(lineNumber - 1)), this._languageConfigurationService);
if (enterAction) {
extraSpaces = previousLineExtraSpaces;

View file

@ -227,7 +227,7 @@ export class TypeOperations {
const lineText = model.getLineContent(selection.startLineNumber);
if (/^\s*$/.test(lineText) && model.isCheapToTokenize(selection.startLineNumber)) {
if (/^\s*$/.test(lineText) && model.tokenization.isCheapToTokenize(selection.startLineNumber)) {
let goodIndent = this._goodIndentForLine(config, model, selection.startLineNumber);
goodIndent = goodIndent || '\t';
const possibleTypeText = config.normalizeIndentation(goodIndent);
@ -300,7 +300,7 @@ export class TypeOperations {
if (config.autoIndent === EditorAutoIndentStrategy.None) {
return TypeOperations._typeCommand(range, '\n', keepPosition);
}
if (!model.isCheapToTokenize(range.getStartPosition().lineNumber) || config.autoIndent === EditorAutoIndentStrategy.Keep) {
if (!model.tokenization.isCheapToTokenize(range.getStartPosition().lineNumber) || config.autoIndent === EditorAutoIndentStrategy.Keep) {
const lineText = model.getLineContent(range.startLineNumber);
const indentation = strings.getLeadingWhitespace(lineText).substring(0, range.startColumn - 1);
return TypeOperations._typeCommand(range, '\n' + config.normalizeIndentation(indentation), keepPosition);
@ -385,7 +385,7 @@ export class TypeOperations {
}
for (let i = 0, len = selections.length; i < len; i++) {
if (!model.isCheapToTokenize(selections[i].getEndPosition().lineNumber)) {
if (!model.tokenization.isCheapToTokenize(selections[i].getEndPosition().lineNumber)) {
return false;
}
}
@ -642,13 +642,13 @@ export class TypeOperations {
}
}
if (!model.isCheapToTokenize(lineNumber)) {
if (!model.tokenization.isCheapToTokenize(lineNumber)) {
// Do not force tokenization
return null;
}
model.forceTokenization(lineNumber);
const lineTokens = model.getLineTokens(lineNumber);
model.tokenization.forceTokenization(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
const scopedLineTokens = createScopedLineTokens(lineTokens, beforeColumn - 1);
if (!pair.shouldAutoClose(scopedLineTokens, beforeColumn - scopedLineTokens.firstCharOffset)) {
return null;
@ -664,7 +664,7 @@ export class TypeOperations {
//
const neutralCharacter = pair.findNeutralCharacter();
if (neutralCharacter) {
const tokenType = model.getTokenTypeIfInsertingCharacter(lineNumber, beforeColumn, neutralCharacter);
const tokenType = model.tokenization.getTokenTypeIfInsertingCharacter(lineNumber, beforeColumn, neutralCharacter);
if (!pair.isOK(tokenType)) {
return null;
}
@ -757,7 +757,7 @@ export class TypeOperations {
}
private static _isTypeInterceptorElectricChar(config: CursorConfiguration, model: ITextModel, selections: Selection[]) {
if (selections.length === 1 && model.isCheapToTokenize(selections[0].getEndPosition().lineNumber)) {
if (selections.length === 1 && model.tokenization.isCheapToTokenize(selections[0].getEndPosition().lineNumber)) {
return true;
}
return false;
@ -769,8 +769,8 @@ export class TypeOperations {
}
const position = selection.getPosition();
model.forceTokenization(position.lineNumber);
const lineTokens = model.getLineTokens(position.lineNumber);
model.tokenization.forceTokenization(position.lineNumber);
const lineTokens = model.tokenization.getLineTokens(position.lineNumber);
let electricAction: IElectricAction | null;
try {

View file

@ -14,9 +14,11 @@ import { getScopedLineTokens, ILanguageConfigurationService } from 'vs/editor/co
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
export interface IVirtualModel {
getLineTokens(lineNumber: number): LineTokens;
getLanguageId(): string;
getLanguageIdAtPosition(lineNumber: number, column: number): string;
tokenization: {
getLineTokens(lineNumber: number): LineTokens;
getLanguageId(): string;
getLanguageIdAtPosition(lineNumber: number, column: number): string;
};
getLineContent(lineNumber: number): string;
}
@ -34,13 +36,13 @@ export interface IIndentConverter {
* else: nearest preceding line of the same language
*/
function getPrecedingValidLine(model: IVirtualModel, lineNumber: number, indentRulesSupport: IndentRulesSupport) {
const languageId = model.getLanguageIdAtPosition(lineNumber, 0);
const languageId = model.tokenization.getLanguageIdAtPosition(lineNumber, 0);
if (lineNumber > 1) {
let lastLineNumber: number;
let resultLineNumber = -1;
for (lastLineNumber = lineNumber - 1; lastLineNumber >= 1; lastLineNumber--) {
if (model.getLanguageIdAtPosition(lastLineNumber, 0) !== languageId) {
if (model.tokenization.getLanguageIdAtPosition(lastLineNumber, 0) !== languageId) {
return resultLineNumber;
}
const text = model.getLineContent(lastLineNumber);
@ -79,7 +81,7 @@ export function getInheritIndentForLine(
return null;
}
const indentRulesSupport = languageConfigurationService.getLanguageConfiguration(model.getLanguageId()).indentRulesSupport;
const indentRulesSupport = languageConfigurationService.getLanguageConfiguration(model.tokenization.getLanguageId()).indentRulesSupport;
if (!indentRulesSupport) {
return null;
}
@ -283,8 +285,8 @@ export function getIndentForEnter(
if (autoIndent < EditorAutoIndentStrategy.Full) {
return null;
}
model.forceTokenization(range.startLineNumber);
const lineTokens = model.getLineTokens(range.startLineNumber);
model.tokenization.forceTokenization(range.startLineNumber);
const lineTokens = model.tokenization.getLineTokens(range.startLineNumber);
const scopedLineTokens = createScopedLineTokens(lineTokens, range.startColumn - 1);
const scopedLineText = scopedLineTokens.getLineContent();
@ -315,14 +317,16 @@ export function getIndentForEnter(
const beforeEnterIndent = strings.getLeadingWhitespace(beforeEnterText);
const virtualModel: IVirtualModel = {
getLineTokens: (lineNumber: number) => {
return model.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
tokenization: {
getLineTokens: (lineNumber: number) => {
return model.tokenization.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
},
},
getLineContent: (lineNumber: number) => {
if (lineNumber === range.startLineNumber) {

View file

@ -179,8 +179,8 @@ export function getIndentationAtPosition(model: ITextModel, lineNumber: number,
}
export function getScopedLineTokens(model: ITextModel, lineNumber: number, columnNumber?: number): ScopedLineTokens {
model.forceTokenization(lineNumber);
const lineTokens = model.getLineTokens(lineNumber);
model.tokenization.forceTokenization(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
const column = (typeof columnNumber === 'undefined' ? model.getLineMaxColumn(lineNumber) - 1 : columnNumber - 1);
return createScopedLineTokens(lineTokens, column);
}

View file

@ -6,23 +6,21 @@
import { Event } from 'vs/base/common/event';
import { IMarkdownString } from 'vs/base/common/htmlContent';
import { IDisposable } from 'vs/base/common/lifecycle';
import { equals } from 'vs/base/common/objects';
import { URI } from 'vs/base/common/uri';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { ISingleEditOperation } from 'vs/editor/common/core/editOperation';
import { IPosition, Position } from 'vs/editor/common/core/position';
import { IRange, Range } from 'vs/editor/common/core/range';
import { Selection } from 'vs/editor/common/core/selection';
import { IModelContentChange, IModelContentChangedEvent, IModelDecorationsChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelOptionsChangedEvent, IModelTokensChangedEvent, InternalModelContentChangeEvent, ModelInjectedTextChangedEvent } from 'vs/editor/common/textModelEvents';
import { WordCharacterClassifier } from 'vs/editor/common/core/wordCharacterClassifier';
import { FormattingOptions, StandardTokenType } from 'vs/editor/common/languages';
import { ThemeColor } from 'vs/platform/theme/common/themeService';
import { ContiguousMultilineTokens } from 'vs/editor/common/tokens/contiguousMultilineTokens';
import { SparseMultilineTokens } from 'vs/editor/common/tokens/sparseMultilineTokens';
import { TextChange } from 'vs/editor/common/core/textChange';
import { equals } from 'vs/base/common/objects';
import { IBracketPairsTextModelPart } from 'vs/editor/common/textModelBracketPairs';
import { IGuidesTextModelPart } from 'vs/editor/common/textModelGuides';
import { WordCharacterClassifier } from 'vs/editor/common/core/wordCharacterClassifier';
import { IWordAtPosition } from 'vs/editor/common/core/wordHelper';
import { ISingleEditOperation } from 'vs/editor/common/core/editOperation';
import { FormattingOptions } from 'vs/editor/common/languages';
import { IBracketPairsTextModelPart } from 'vs/editor/common/textModelBracketPairs';
import { IModelContentChange, IModelContentChangedEvent, IModelDecorationsChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelOptionsChangedEvent, IModelTokensChangedEvent, InternalModelContentChangeEvent, ModelInjectedTextChangedEvent } from 'vs/editor/common/textModelEvents';
import { IGuidesTextModelPart } from 'vs/editor/common/textModelGuides';
import { ITokenizationTextModelPart } from 'vs/editor/common/tokenizationTextModelPart';
import { ThemeColor } from 'vs/platform/theme/common/themeService';
/**
* Vertical Lane in the overview ruler of the editor.
@ -779,11 +777,6 @@ export interface ITextModel {
*/
isDisposed(): boolean;
/**
* @internal
*/
tokenizeViewport(startLineNumber: number, endLineNumber: number): void;
/**
* This model is so large that it would not be a good idea to sync it over
* to web workers or other places.
@ -844,63 +837,6 @@ export interface ITextModel {
*/
findPreviousMatch(searchString: string, searchStart: IPosition, isRegex: boolean, matchCase: boolean, wordSeparators: string | null, captureMatches: boolean): FindMatch | null;
/**
* @internal
*/
setTokens(tokens: ContiguousMultilineTokens[]): void;
/**
* @internal
*/
setSemanticTokens(tokens: SparseMultilineTokens[] | null, isComplete: boolean): void;
/**
* @internal
*/
setPartialSemanticTokens(range: Range, tokens: SparseMultilineTokens[] | null): void;
/**
* @internal
*/
hasCompleteSemanticTokens(): boolean;
/**
* @internal
*/
hasSomeSemanticTokens(): boolean;
/**
* Flush all tokenization state.
* @internal
*/
resetTokenization(): void;
/**
* Force tokenization information for `lineNumber` to be accurate.
* @internal
*/
forceTokenization(lineNumber: number): void;
/**
* If it is cheap, force tokenization information for `lineNumber` to be accurate.
* This is based on a heuristic.
* @internal
*/
tokenizeIfCheap(lineNumber: number): void;
/**
* Check if calling `forceTokenization` for this `lineNumber` will be cheap (time-wise).
* This is based on a heuristic.
* @internal
*/
isCheapToTokenize(lineNumber: number): boolean;
/**
* Get the tokens for the line `lineNumber`.
* The tokens might be inaccurate. Use `forceTokenization` to ensure accurate tokens.
* @internal
*/
getLineTokens(lineNumber: number): LineTokens;
/**
* Get the language associated with this model.
@ -920,18 +856,6 @@ export interface ITextModel {
*/
getLanguageIdAtPosition(lineNumber: number, column: number): string;
/**
* Returns the standard token type for a character if the character were to be inserted at
* the given position. If the result cannot be accurate, it returns null.
* @internal
*/
getTokenTypeIfInsertingCharacter(lineNumber: number, column: number, character: string): StandardTokenType;
/**
* @internal
*/
tokenizeLineWithEdit(position: IPosition, length: number, newText: string): LineTokens | null;
/**
* Get the word under or besides `position`.
* @param position The position to look for a word.
@ -1253,6 +1177,11 @@ export interface ITextModel {
* @internal
*/
readonly guides: IGuidesTextModelPart;
/**
* @internal
*/
readonly tokenization: ITokenizationTextModelPart;
}
export const enum PositionAffinity {

View file

@ -178,7 +178,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
private _matchBracket(position: Position, continueSearchPredicate: ContinueBracketSearchPredicate): [Range, Range] | null {
const lineNumber = position.lineNumber;
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const lineText = this.textModel.getLineContent(lineNumber);
const tokenIndex = lineTokens.findTokenIndexAtOffset(position.column - 1);
@ -309,7 +309,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
};
for (let lineNumber = position.lineNumber; lineNumber >= 1; lineNumber--) {
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const tokenCount = lineTokens.getCount();
const lineText = this.textModel.getLineContent(lineNumber);
@ -397,7 +397,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
const lineCount = this.textModel.getLineCount();
for (let lineNumber = position.lineNumber; lineNumber <= lineCount; lineNumber++) {
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const tokenCount = lineTokens.getCount();
const lineText = this.textModel.getLineContent(lineNumber);
@ -454,7 +454,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
let languageId: string | null = null;
let modeBrackets: RichEditBrackets | null = null;
for (let lineNumber = position.lineNumber; lineNumber >= 1; lineNumber--) {
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const tokenCount = lineTokens.getCount();
const lineText = this.textModel.getLineContent(lineNumber);
@ -532,7 +532,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
let languageId: string | null = null;
let modeBrackets: RichEditBrackets | null = null;
for (let lineNumber = position.lineNumber; lineNumber <= lineCount; lineNumber++) {
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const tokenCount = lineTokens.getCount();
const lineText = this.textModel.getLineContent(lineNumber);
@ -653,7 +653,7 @@ export class BracketPairsTextModelPart extends Disposable implements IBracketPai
let languageId: string | null = null;
let modeBrackets: RichEditBrackets | null = null;
for (let lineNumber = position.lineNumber; lineNumber <= lineCount; lineNumber++) {
const lineTokens = this.textModel.getLineTokens(lineNumber);
const lineTokens = this.textModel.tokenization.getLineTokens(lineNumber);
const tokenCount = lineTokens.getCount();
const lineText = this.textModel.getLineContent(lineNumber);

View file

@ -8,7 +8,7 @@ import { Disposable } from 'vs/base/common/lifecycle';
import { Range } from 'vs/editor/common/core/range';
import { ITextModel } from 'vs/editor/common/model';
import { BracketInfo, BracketPairWithMinIndentationInfo } from 'vs/editor/common/textModelBracketPairs';
import { BackgroundTokenizationState, TextModel } from 'vs/editor/common/model/textModel';
import { TextModel } from 'vs/editor/common/model/textModel';
import { IModelContentChangedEvent, IModelTokensChangedEvent } from 'vs/editor/common/textModelEvents';
import { ResolvedLanguageConfiguration } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { AstNode, AstNodeKind } from './ast';
@ -18,6 +18,7 @@ import { Length, lengthAdd, lengthGreaterThanEqual, lengthLessThanEqual, lengthO
import { parseDocument } from './parser';
import { DenseKeyProvider } from './smallImmutableSet';
import { FastTokenizer, TextBufferTokenizer } from './tokenizer';
import { BackgroundTokenizationState } from 'vs/editor/common/tokenizationTextModelPart';
export class BracketPairsTree extends Disposable {
private readonly didChangeEmitter = new Emitter<void>();
@ -49,18 +50,18 @@ export class BracketPairsTree extends Disposable {
) {
super();
if (textModel.backgroundTokenizationState === BackgroundTokenizationState.Uninitialized) {
if (textModel.tokenization.backgroundTokenizationState === BackgroundTokenizationState.Uninitialized) {
// There is no token information yet
const brackets = this.brackets.getSingleLanguageBracketTokens(this.textModel.getLanguageId());
const tokenizer = new FastTokenizer(this.textModel.getValue(), brackets);
this.initialAstWithoutTokens = parseDocument(tokenizer, [], undefined, true);
this.astWithTokens = this.initialAstWithoutTokens;
} else if (textModel.backgroundTokenizationState === BackgroundTokenizationState.Completed) {
} else if (textModel.tokenization.backgroundTokenizationState === BackgroundTokenizationState.Completed) {
// Skip the initial ast, as there is no flickering.
// Directly create the tree with token information.
this.initialAstWithoutTokens = undefined;
this.astWithTokens = this.parseDocumentFromTextBuffer([], undefined, false);
} else if (textModel.backgroundTokenizationState === BackgroundTokenizationState.InProgress) {
} else if (textModel.tokenization.backgroundTokenizationState === BackgroundTokenizationState.InProgress) {
this.initialAstWithoutTokens = this.parseDocumentFromTextBuffer([], undefined, true);
this.astWithTokens = this.initialAstWithoutTokens;
}
@ -69,7 +70,7 @@ export class BracketPairsTree extends Disposable {
//#region TextModel events
public handleDidChangeBackgroundTokenizationState(): void {
if (this.textModel.backgroundTokenizationState === BackgroundTokenizationState.Completed) {
if (this.textModel.tokenization.backgroundTokenizationState === BackgroundTokenizationState.Completed) {
const wasUndefined = this.initialAstWithoutTokens === undefined;
// Clear the initial tree as we can use the tree with token information now.
this.initialAstWithoutTokens = undefined;

View file

@ -54,7 +54,10 @@ export interface ITokenizerSource {
getValue(): string;
getLineCount(): number;
getLineLength(lineNumber: number): number;
getLineTokens(lineNumber: number): IViewLineTokens;
tokenization: {
getLineTokens(lineNumber: number): IViewLineTokens;
};
}
export class TextBufferTokenizer implements Tokenizer {
@ -166,7 +169,7 @@ class NonPeekableTextBufferTokenizer {
}
if (this.line === null) {
this.lineTokens = this.textModel.getLineTokens(this.lineIdx + 1);
this.lineTokens = this.textModel.tokenization.getLineTokens(this.lineIdx + 1);
this.line = this.lineTokens.getLineContent();
this.lineTokenOffset = this.lineCharOffset === 0 ? 0 : this.lineTokens!.findTokenIndexAtOffset(this.lineCharOffset);
}
@ -238,7 +241,7 @@ class NonPeekableTextBufferTokenizer {
break;
}
this.lineIdx++;
this.lineTokens = this.textModel.getLineTokens(this.lineIdx + 1);
this.lineTokens = this.textModel.tokenization.getLineTokens(this.lineIdx + 1);
this.lineTokenOffset = 0;
this.line = this.lineTokens.getLineContent();
this.lineCharOffset = 0;

View file

@ -76,7 +76,10 @@ class StaticTokenizerSource implements ITokenizerSource {
getLineLength(lineNumber: number): number {
return this.lines[lineNumber - 1].getLineContent().length;
}
getLineTokens(lineNumber: number): IViewLineTokens {
return this.lines[lineNumber - 1];
}
tokenization = {
getLineTokens: (lineNumber: number): IViewLineTokens => {
return this.lines[lineNumber - 1];
}
};
}

View file

@ -5,7 +5,6 @@
import { ArrayQueue, pushMany } from 'vs/base/common/arrays';
import { VSBuffer, VSBufferReadableStream } from 'vs/base/common/buffer';
import { CharCode } from 'vs/base/common/charCode';
import { Color } from 'vs/base/common/color';
import { onUnexpectedError } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
@ -15,39 +14,35 @@ import { listenStream } from 'vs/base/common/stream';
import * as strings from 'vs/base/common/strings';
import { Constants } from 'vs/base/common/uint';
import { URI } from 'vs/base/common/uri';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { ISingleEditOperation } from 'vs/editor/common/core/editOperation';
import { countEOL } from 'vs/editor/common/core/eolCounter';
import { normalizeIndentation } from 'vs/editor/common/core/indentation';
import { IPosition, Position } from 'vs/editor/common/core/position';
import { IRange, Range } from 'vs/editor/common/core/range';
import { Selection } from 'vs/editor/common/core/selection';
import { TextChange } from 'vs/editor/common/core/textChange';
import { EDITOR_MODEL_DEFAULTS } from 'vs/editor/common/core/textModelDefaults';
import { IWordAtPosition } from 'vs/editor/common/core/wordHelper';
import { FormattingOptions } from 'vs/editor/common/languages';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { ILanguageConfigurationService } from 'vs/editor/common/languages/languageConfigurationRegistry';
import * as model from 'vs/editor/common/model';
import { IBracketPairsTextModelPart } from 'vs/editor/common/textModelBracketPairs';
import { BracketPairsTextModelPart } from 'vs/editor/common/model/bracketPairsTextModelPart/bracketPairsImpl';
import { ColorizedBracketPairsDecorationProvider } from 'vs/editor/common/model/bracketPairsTextModelPart/colorizedBracketPairsDecorationProvider';
import { EditStack } from 'vs/editor/common/model/editStack';
import { GuidesTextModelPart } from 'vs/editor/common/model/guidesTextModelPart';
import { IGuidesTextModelPart } from 'vs/editor/common/textModelGuides';
import { guessIndentation } from 'vs/editor/common/model/indentationGuesser';
import { IntervalNode, IntervalTree, recomputeMaxEnd } from 'vs/editor/common/model/intervalTree';
import { PieceTreeTextBuffer } from 'vs/editor/common/model/pieceTreeTextBuffer/pieceTreeTextBuffer';
import { PieceTreeTextBufferBuilder } from 'vs/editor/common/model/pieceTreeTextBuffer/pieceTreeTextBufferBuilder';
import { TextChange } from 'vs/editor/common/core/textChange';
import { IModelContentChangedEvent, IModelDecorationsChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelOptionsChangedEvent, IModelTokensChangedEvent, InternalModelContentChangeEvent, LineInjectedText, ModelInjectedTextChangedEvent, ModelRawChange, ModelRawContentChangedEvent, ModelRawEOLChanged, ModelRawFlush, ModelRawLineChanged, ModelRawLinesDeleted, ModelRawLinesInserted } from 'vs/editor/common/textModelEvents';
import { SearchParams, TextModelSearch } from 'vs/editor/common/model/textModelSearch';
import { TextModelTokenization } from 'vs/editor/common/model/textModelTokens';
import { countEOL } from 'vs/editor/common/core/eolCounter';
import { ContiguousMultilineTokens } from 'vs/editor/common/tokens/contiguousMultilineTokens';
import { SparseMultilineTokens } from 'vs/editor/common/tokens/sparseMultilineTokens';
import { ContiguousTokensStore } from 'vs/editor/common/tokens/contiguousTokensStore';
import { SparseTokensStore } from 'vs/editor/common/tokens/sparseTokensStore';
import { getWordAtText, IWordAtPosition } from 'vs/editor/common/core/wordHelper';
import { FormattingOptions, StandardTokenType } from 'vs/editor/common/languages';
import { ILanguageConfigurationService, ResolvedLanguageConfiguration } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { TokenizationTextModelPart } from 'vs/editor/common/model/tokenizationTextModelPart';
import { IBracketPairsTextModelPart } from 'vs/editor/common/textModelBracketPairs';
import { IModelContentChangedEvent, IModelDecorationsChangedEvent, IModelOptionsChangedEvent, InternalModelContentChangeEvent, LineInjectedText, ModelInjectedTextChangedEvent, ModelRawChange, ModelRawContentChangedEvent, ModelRawEOLChanged, ModelRawFlush, ModelRawLineChanged, ModelRawLinesDeleted, ModelRawLinesInserted } from 'vs/editor/common/textModelEvents';
import { IGuidesTextModelPart } from 'vs/editor/common/textModelGuides';
import { ITokenizationTextModelPart } from 'vs/editor/common/tokenizationTextModelPart';
import { IColorTheme, ThemeColor } from 'vs/platform/theme/common/themeService';
import { IUndoRedoService, ResourceEditStackSnapshot } from 'vs/platform/undoRedo/common/undoRedo';
import { EDITOR_MODEL_DEFAULTS } from 'vs/editor/common/core/textModelDefaults';
import { normalizeIndentation } from 'vs/editor/common/core/indentation';
import { ISingleEditOperation } from 'vs/editor/common/core/editOperation';
function createTextBufferBuilder() {
return new PieceTreeTextBufferBuilder();
@ -172,12 +167,6 @@ const enum StringOffsetValidationType {
SurrogatePairs = 1,
}
export const enum BackgroundTokenizationState {
Uninitialized = 0,
InProgress = 1,
Completed = 2,
}
export class TextModel extends Disposable implements model.ITextModel, IDecorationsTreesHost {
private static readonly MODEL_SYNC_LIMIT = 50 * 1024 * 1024; // 50 MB
@ -227,14 +216,9 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
private readonly _onDidChangeDecorations: DidChangeDecorationsEmitter = this._register(new DidChangeDecorationsEmitter(affectedInjectedTextLines => this.handleBeforeFireDecorationsChangedEvent(affectedInjectedTextLines)));
public readonly onDidChangeDecorations: Event<IModelDecorationsChangedEvent> = this._onDidChangeDecorations.event;
private readonly _onDidChangeLanguage: Emitter<IModelLanguageChangedEvent> = this._register(new Emitter<IModelLanguageChangedEvent>());
public readonly onDidChangeLanguage: Event<IModelLanguageChangedEvent> = this._onDidChangeLanguage.event;
private readonly _onDidChangeLanguageConfiguration: Emitter<IModelLanguageConfigurationChangedEvent> = this._register(new Emitter<IModelLanguageConfigurationChangedEvent>());
public readonly onDidChangeLanguageConfiguration: Event<IModelLanguageConfigurationChangedEvent> = this._onDidChangeLanguageConfiguration.event;
private readonly _onDidChangeTokens: Emitter<IModelTokensChangedEvent> = this._register(new Emitter<IModelTokensChangedEvent>());
public readonly onDidChangeTokens: Event<IModelTokensChangedEvent> = this._onDidChangeTokens.event;
public get onDidChangeLanguage() { return this._tokenizationTextModelPart.onDidChangeLanguage; }
public get onDidChangeLanguageConfiguration() { return this._tokenizationTextModelPart.onDidChangeLanguageConfiguration; }
public get onDidChangeTokens() { return this._tokenizationTextModelPart.onDidChangeTokens; }
private readonly _onDidChangeOptions: Emitter<IModelOptionsChangedEvent> = this._register(new Emitter<IModelOptionsChangedEvent>());
public readonly onDidChangeOptions: Event<IModelOptionsChangedEvent> = this._onDidChangeOptions.event;
@ -265,7 +249,8 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
private _options: model.TextModelResolvedOptions;
private _isDisposed: boolean;
private _isDisposing: boolean;
private __isDisposing: boolean;
public _isDisposing(): boolean { return this.__isDisposing; }
private _versionId: number;
/**
* Unlike, versionId, this can go down (via undo) or go to previous values (via redo)
@ -294,40 +279,15 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
private readonly _decorationProvider: ColorizedBracketPairsDecorationProvider;
//#endregion
//#region Tokenization
private _languageId: string;
private readonly _languageRegistryListener: IDisposable;
private readonly _tokens: ContiguousTokensStore;
private readonly _semanticTokens: SparseTokensStore;
private readonly _tokenization: TextModelTokenization;
//#endregion
private readonly _tokenizationTextModelPart: TokenizationTextModelPart;
public get tokenization(): ITokenizationTextModelPart { return this._tokenizationTextModelPart; }
private readonly _bracketPairColorizer: BracketPairsTextModelPart;
public get bracketPairs(): IBracketPairsTextModelPart { return this._bracketPairColorizer; }
private readonly _bracketPairs: BracketPairsTextModelPart;
public get bracketPairs(): IBracketPairsTextModelPart { return this._bracketPairs; }
private readonly _guidesTextModelPart: GuidesTextModelPart;
public get guides(): IGuidesTextModelPart { return this._guidesTextModelPart; }
private _backgroundTokenizationState = BackgroundTokenizationState.Uninitialized;
public get backgroundTokenizationState(): BackgroundTokenizationState {
return this._backgroundTokenizationState;
}
private handleTokenizationProgress(completed: boolean) {
if (this._backgroundTokenizationState === BackgroundTokenizationState.Completed) {
// We already did a full tokenization and don't go back to progressing.
return;
}
const newState = completed ? BackgroundTokenizationState.Completed : BackgroundTokenizationState.InProgress;
if (this._backgroundTokenizationState !== newState) {
this._backgroundTokenizationState = newState;
this._bracketPairColorizer.handleDidChangeBackgroundTokenizationState();
this._onBackgroundTokenizationStateChanged.fire();
}
}
private readonly _onBackgroundTokenizationStateChanged = this._register(new Emitter<void>());
public readonly onBackgroundTokenizationStateChanged: Event<void> = this._onBackgroundTokenizationStateChanged.event;
constructor(
source: string | model.ITextBufferFactory,
languageId: string,
@ -356,6 +316,17 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
this._options = TextModel.resolveOptions(this._buffer, creationOptions);
this._bracketPairs = this._register(new BracketPairsTextModelPart(this, this._languageConfigurationService));
this._guidesTextModelPart = this._register(new GuidesTextModelPart(this, this._languageConfigurationService));
this._decorationProvider = this._register(new ColorizedBracketPairsDecorationProvider(this));
this._tokenizationTextModelPart = new TokenizationTextModelPart(
this._languageService,
this._languageConfigurationService,
this,
this._bracketPairs,
languageId
);
const bufferLineCount = this._buffer.getLineCount();
const bufferTextLength = this._buffer.getValueLengthInRange(new Range(1, 1, bufferLineCount, this._buffer.getLineLength(bufferLineCount) + 1), model.EndOfLinePreference.TextDefined);
@ -378,17 +349,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
this._initialUndoRedoSnapshot = null;
this._isDisposed = false;
this._isDisposing = false;
this._languageId = languageId;
this._languageRegistryListener = this._languageConfigurationService.onDidChange(
e => {
if (e.affects(this._languageId)) {
this._onDidChangeLanguageConfiguration.fire({});
}
}
);
this.__isDisposing = false;
this._instanceId = strings.singleLetterHash(MODEL_ID);
this._lastDecorationId = 0;
@ -400,13 +361,6 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
this._isRedoing = false;
this._trimAutoWhitespaceLines = null;
this._tokens = new ContiguousTokensStore(this._languageService.languageIdCodec);
this._semanticTokens = new SparseTokensStore(this._languageService.languageIdCodec);
this._tokenization = new TextModelTokenization(this, this._languageService.languageIdCodec);
this._bracketPairColorizer = this._register(new BracketPairsTextModelPart(this, this._languageConfigurationService));
this._guidesTextModelPart = this._register(new GuidesTextModelPart(this, this._languageConfigurationService));
this._decorationProvider = this._register(new ColorizedBracketPairsDecorationProvider(this));
this._register(this._decorationProvider.onDidChange(() => {
this._onDidChangeDecorations.beginDeferredEmit();
@ -416,14 +370,13 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
}
public override dispose(): void {
this._isDisposing = true;
this.__isDisposing = true;
this._onWillDispose.fire();
this._languageRegistryListener.dispose();
this._tokenization.dispose();
this._tokenizationTextModelPart.dispose();
this._isDisposed = true;
super.dispose();
this._bufferDisposable.dispose();
this._isDisposing = false;
this.__isDisposing = false;
// Manually release reference to previous text buffer to avoid large leaks
// in case someone leaks a TextModel reference
const emptyDisposedTextBuffer = new PieceTreeTextBuffer([], '', '\n', false, false, true, true);
@ -436,14 +389,11 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
return (
this._onWillDispose.hasListeners()
|| this._onDidChangeDecorations.hasListeners()
|| this._onDidChangeLanguage.hasListeners()
|| this._onDidChangeLanguageConfiguration.hasListeners()
|| this._onDidChangeTokens.hasListeners()
|| this._tokenizationTextModelPart._hasListeners()
|| this._onDidChangeOptions.hasListeners()
|| this._onDidChangeAttached.hasListeners()
|| this._onDidChangeInjectedText.hasListeners()
|| this._eventEmitter.hasListeners()
|| this._onBackgroundTokenizationStateChanged.hasListeners()
);
}
@ -464,12 +414,12 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
}
private _emitContentChangedEvent(rawChange: ModelRawContentChangedEvent, change: IModelContentChangedEvent): void {
if (this._isDisposing) {
if (this.__isDisposing) {
// Do not confuse listeners by emitting any event after disposing
return;
}
this._bracketPairColorizer.handleDidChangeContent(change);
this._tokenization.handleDidChangeContent(change);
this._bracketPairs.handleDidChangeContent(change);
this._tokenizationTextModelPart.handleDidChangeContent(change);
this._eventEmitter.fire(new InternalModelContentChangeEvent(rawChange, change));
}
@ -513,8 +463,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
this._increaseVersionId();
// Flush all tokens
this._tokens.flush();
this._semanticTokens.flush();
this._tokenizationTextModelPart.flush();
// Destroy all my decorations
this._decorations = Object.create(null);
@ -600,7 +549,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
public onBeforeAttached(): void {
this._attachedEditorCount++;
if (this._attachedEditorCount === 1) {
this._tokenization.handleDidChangeAttached();
this._tokenizationTextModelPart.handleDidChangeAttached();
this._onDidChangeAttached.fire(undefined);
}
}
@ -608,7 +557,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
public onBeforeDetached(): void {
this._attachedEditorCount--;
if (this._attachedEditorCount === 0) {
this._tokenization.handleDidChangeAttached();
this._tokenizationTextModelPart.handleDidChangeAttached();
this._onDidChangeAttached.fire(undefined);
}
}
@ -697,7 +646,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
const e = this._options.createChangeEvent(newOpts);
this._options = newOpts;
this._bracketPairColorizer.handleDidChangeOptions(e);
this._bracketPairs.handleDidChangeOptions(e);
this._decorationProvider.handleDidChangeOptions(e);
this._onDidChangeOptions.fire(e);
}
@ -1463,8 +1412,7 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
for (let i = 0, len = contentChanges.length; i < len; i++) {
const change = contentChanges[i];
const [eolCount, firstLineLength, lastLineLength] = countEOL(change.text);
this._tokens.acceptEdit(change.range, eolCount, firstLineLength);
this._semanticTokens.acceptEdit(change.range, eolCount, firstLineLength, lastLineLength, change.text.length > 0 ? change.text.charCodeAt(0) : CharCode.Null);
this._tokenizationTextModelPart.acceptEdit(change.range, change.text, eolCount, firstLineLength, lastLineLength);
this._decorationsTree.acceptReplace(change.rangeOffset, change.rangeLength, change.text.length, change.forceMoveMarkers);
}
@ -1944,270 +1892,28 @@ export class TextModel extends Disposable implements model.ITextModel, IDecorati
//#region Tokenization

/**
 * Replaces the grammar tokens of a single line.
 * @param lineNumber 1-based line number; throws on out-of-range values.
 * @param tokens encoded token data for the line, or null to clear it.
 */
public setLineTokens(lineNumber: number, tokens: Uint32Array | ArrayBuffer | null): void {
	if (lineNumber < 1 || lineNumber > this.getLineCount()) {
		throw new Error('Illegal value for lineNumber');
	}
	// Token store is 0-based, hence lineNumber - 1.
	this._tokens.setTokens(this._languageId, lineNumber - 1, this._buffer.getLineLength(lineNumber), tokens, false);
}
/**
 * Applies multi-line grammar tokens and emits a tokens-changed event
 * covering only the lines that actually changed. Also records background
 * tokenization progress.
 * @param tokens token blocks to apply.
 * @param backgroundTokenizationCompleted true when background tokenization
 *        has finished for the whole model.
 */
public setTokens(tokens: ContiguousMultilineTokens[], backgroundTokenizationCompleted: boolean = false): void {
	if (tokens.length !== 0) {
		const ranges: { fromLineNumber: number; toLineNumber: number }[] = [];

		for (let i = 0, len = tokens.length; i < len; i++) {
			const element = tokens[i];
			let minChangedLineNumber = 0;
			let maxChangedLineNumber = 0;
			let hasChange = false;
			for (let lineNumber = element.startLineNumber; lineNumber <= element.endLineNumber; lineNumber++) {
				if (hasChange) {
					// A change was already detected in this block: apply
					// unconditionally (checkEquality = false) and extend the range.
					this._tokens.setTokens(this._languageId, lineNumber - 1, this._buffer.getLineLength(lineNumber), element.getLineTokens(lineNumber), false);
					maxChangedLineNumber = lineNumber;
				} else {
					// No change seen yet: ask the store to compare (checkEquality = true)
					// so unchanged lines do not widen the reported range.
					const lineHasChange = this._tokens.setTokens(this._languageId, lineNumber - 1, this._buffer.getLineLength(lineNumber), element.getLineTokens(lineNumber), true);
					if (lineHasChange) {
						hasChange = true;
						minChangedLineNumber = lineNumber;
						maxChangedLineNumber = lineNumber;
					}
				}
			}
			if (hasChange) {
				ranges.push({ fromLineNumber: minChangedLineNumber, toLineNumber: maxChangedLineNumber });
			}
		}

		if (ranges.length > 0) {
			this._emitModelTokensChangedEvent({
				tokenizationSupportChanged: false,
				semanticTokensApplied: false,
				ranges: ranges
			});
		}
	}
	this.handleTokenizationProgress(backgroundTokenizationCompleted);
}
/**
 * Replaces all semantic tokens of the model and notifies listeners over
 * the full line range.
 * @param tokens the new semantic tokens, or null to signal none applied.
 * @param isComplete whether the token set covers the whole document.
 */
public setSemanticTokens(tokens: SparseMultilineTokens[] | null, isComplete: boolean): void {
	this._semanticTokens.set(tokens, isComplete);

	this._emitModelTokensChangedEvent({
		tokenizationSupportChanged: false,
		semanticTokensApplied: tokens !== null,
		ranges: [{ fromLineNumber: 1, toLineNumber: this.getLineCount() }]
	});
}
// True when a full-document semantic token set has been applied.
public hasCompleteSemanticTokens(): boolean {
	return this._semanticTokens.isComplete();
}
// True when at least some semantic tokens are present (possibly partial).
public hasSomeSemanticTokens(): boolean {
	return !this._semanticTokens.isEmpty();
}
/**
 * Applies semantic tokens to a sub-range of the document (e.g. viewport
 * results). Ignored when a complete token set already exists, so partial
 * results never overwrite full results.
 */
public setPartialSemanticTokens(range: Range, tokens: SparseMultilineTokens[]): void {
	if (this.hasCompleteSemanticTokens()) {
		return;
	}
	const changedRange = this.validateRange(this._semanticTokens.setPartial(range, tokens));

	this._emitModelTokensChangedEvent({
		tokenizationSupportChanged: false,
		semanticTokensApplied: true,
		ranges: [{ fromLineNumber: changedRange.startLineNumber, toLineNumber: changedRange.endLineNumber }]
	});
}
/**
 * Eagerly tokenizes the given (1-based, inclusive) line range, clamping it
 * to the buffer bounds first. Used to prioritize visible lines.
 */
public tokenizeViewport(startLineNumber: number, endLineNumber: number): void {
	startLineNumber = Math.max(1, startLineNumber);
	endLineNumber = Math.min(this._buffer.getLineCount(), endLineNumber);
	this._tokenization.tokenizeViewport(startLineNumber, endLineNumber);
}
/**
 * Discards all grammar tokens and emits a change event over every line,
 * flagged with tokenizationSupportChanged so listeners re-request tokens.
 */
public clearTokens(): void {
	this._tokens.flush();
	this._emitModelTokensChangedEvent({
		tokenizationSupportChanged: true,
		semanticTokensApplied: false,
		ranges: [{
			fromLineNumber: 1,
			toLineNumber: this._buffer.getLineCount()
		}]
	});
}
/**
 * Discards all semantic tokens and emits a change event over every line.
 */
public clearSemanticTokens(): void {
	this._semanticTokens.flush();

	this._emitModelTokensChangedEvent({
		tokenizationSupportChanged: false,
		semanticTokensApplied: false,
		ranges: [{ fromLineNumber: 1, toLineNumber: this.getLineCount() }]
	});
}
/**
 * Routes a tokens-changed event to the bracket-pair colorizer and external
 * listeners. Suppressed while the model is disposing to avoid confusing
 * listeners with post-dispose events.
 */
private _emitModelTokensChangedEvent(e: IModelTokensChangedEvent): void {
	if (!this._isDisposing) {
		this._bracketPairColorizer.handleDidChangeTokens(e);
		this._onDidChangeTokens.fire(e);
	}
}
// Restarts tokenization from scratch (delegates to the tokenizer).
public resetTokenization(): void {
	this._tokenization.reset();
}
/**
 * Synchronously tokenizes up to and including the given line.
 * @param lineNumber 1-based; throws on out-of-range values.
 */
public forceTokenization(lineNumber: number): void {
	if (lineNumber < 1 || lineNumber > this.getLineCount()) {
		throw new Error('Illegal value for lineNumber');
	}
	this._tokenization.forceTokenization(lineNumber);
}
// Whether tokenizing up to this line can be done synchronously without jank.
public isCheapToTokenize(lineNumber: number): boolean {
	return this._tokenization.isCheapToTokenize(lineNumber);
}
public tokenizeIfCheap(lineNumber: number): void {
if (this.isCheapToTokenize(lineNumber)) {
this.forceTokenization(lineNumber);
}
}
/**
 * Returns the current (grammar + semantic) tokens of a line.
 * @param lineNumber 1-based; throws on out-of-range values.
 */
public getLineTokens(lineNumber: number): LineTokens {
	if (lineNumber < 1 || lineNumber > this.getLineCount()) {
		throw new Error('Illegal value for lineNumber');
	}
	return this._getLineTokens(lineNumber);
}
/**
 * Builds line tokens without range validation: fetches the grammar tokens
 * for the line, then overlays the sparse semantic tokens on top.
 */
private _getLineTokens(lineNumber: number): LineTokens {
	const lineText = this.getLineContent(lineNumber);
	const syntacticTokens = this._tokens.getTokens(this._languageId, lineNumber - 1, lineText);
	return this._semanticTokens.addSparseTokens(lineNumber, syntacticTokens);
}
// TODO move them to the tokenization part.
public getLanguageId(): string {
return this._languageId;
return this.tokenization.getLanguageId();
}
public setMode(languageId: string): void {
if (this._languageId === languageId) {
// There's nothing to do
return;
}
const e: IModelLanguageChangedEvent = {
oldLanguage: this._languageId,
newLanguage: languageId
};
this._languageId = languageId;
this._bracketPairColorizer.handleDidChangeLanguage(e);
this._tokenization.handleDidChangeLanguage(e);
this._onDidChangeLanguage.fire(e);
this._onDidChangeLanguageConfiguration.fire({});
this.tokenization.setLanguageId(languageId);
}
public getLanguageIdAtPosition(lineNumber: number, column: number): string {
const position = this.validatePosition(new Position(lineNumber, column));
const lineTokens = this.getLineTokens(position.lineNumber);
return lineTokens.getLanguageId(lineTokens.findTokenIndexAtOffset(position.column - 1));
return this.tokenization.getLanguageIdAtPosition(lineNumber, column);
}
public setLineTokens(lineNumber: number, tokens: Uint32Array | ArrayBuffer | null): void {
this._tokenizationTextModelPart.setLineTokens(lineNumber, tokens);
}
/**
 * Predicts the standard token type at the given position if `character`
 * were inserted there (used e.g. to decide auto-closing behavior).
 */
public getTokenTypeIfInsertingCharacter(lineNumber: number, column: number, character: string): StandardTokenType {
	const position = this.validatePosition(new Position(lineNumber, column));
	return this._tokenization.getTokenTypeIfInsertingCharacter(position, character);
}
/**
 * Tokenizes the line as if `length` characters at `position` were replaced
 * by `newText`, without mutating the model. Returns null when the tokenizer
 * cannot provide a result.
 */
tokenizeLineWithEdit(position: IPosition, length: number, newText: string): LineTokens | null {
	const validatedPosition = this.validatePosition(position);
	return this._tokenization.tokenizeLineWithEdit(validatedPosition, length, newText);
}
// Shorthand to resolve the configuration of a language id.
private getLanguageConfiguration(languageId: string): ResolvedLanguageConfiguration {
	return this._languageConfigurationService.getLanguageConfiguration(languageId);
}
// Having tokens allows implementing additional helper methods

/**
 * Finds the word at a position, honoring per-language word definitions:
 * the word definition used is the one of the language of the token under
 * the cursor, restricted to the contiguous run of same-language tokens.
 * Returns null when no word touches the position.
 */
public getWordAtPosition(_position: IPosition): IWordAtPosition | null {
	this._assertNotDisposed();
	const position = this.validatePosition(_position);
	const lineContent = this.getLineContent(position.lineNumber);
	const lineTokens = this._getLineTokens(position.lineNumber);
	const tokenIndex = lineTokens.findTokenIndexAtOffset(position.column - 1);

	// (1). First try checking right biased word
	const [rbStartOffset, rbEndOffset] = TextModel._findLanguageBoundaries(lineTokens, tokenIndex);
	const rightBiasedWord = getWordAtText(
		position.column,
		this.getLanguageConfiguration(lineTokens.getLanguageId(tokenIndex)).getWordDefinition(),
		lineContent.substring(rbStartOffset, rbEndOffset),
		rbStartOffset
	);
	// Make sure the result touches the original passed in position
	if (rightBiasedWord && rightBiasedWord.startColumn <= _position.column && _position.column <= rightBiasedWord.endColumn) {
		return rightBiasedWord;
	}

	// (2). Else, if we were at a language boundary, check the left biased word
	if (tokenIndex > 0 && rbStartOffset === position.column - 1) {
		// edge case, where `position` sits between two tokens belonging to two different languages
		const [lbStartOffset, lbEndOffset] = TextModel._findLanguageBoundaries(lineTokens, tokenIndex - 1);
		const leftBiasedWord = getWordAtText(
			position.column,
			this.getLanguageConfiguration(lineTokens.getLanguageId(tokenIndex - 1)).getWordDefinition(),
			lineContent.substring(lbStartOffset, lbEndOffset),
			lbStartOffset
		);
		// Make sure the result touches the original passed in position
		if (leftBiasedWord && leftBiasedWord.startColumn <= _position.column && _position.column <= leftBiasedWord.endColumn) {
			return leftBiasedWord;
		}
	}

	return null;
}
private static _findLanguageBoundaries(lineTokens: LineTokens, tokenIndex: number): [number, number] {
const languageId = lineTokens.getLanguageId(tokenIndex);
// go left until a different language is hit
let startOffset = 0;
for (let i = tokenIndex; i >= 0 && lineTokens.getLanguageId(i) === languageId; i--) {
startOffset = lineTokens.getStartOffset(i);
}
// go right until a different language is hit
let endOffset = lineTokens.getLineContent().length;
for (let i = tokenIndex, tokenCount = lineTokens.getCount(); i < tokenCount && lineTokens.getLanguageId(i) === languageId; i++) {
endOffset = lineTokens.getEndOffset(i);
}
return [startOffset, endOffset];
public getWordAtPosition(position: IPosition): IWordAtPosition | null {
return this._tokenizationTextModelPart.getWordAtPosition(position);
}
public getWordUntilPosition(position: IPosition): IWordAtPosition {
const wordAtPosition = this.getWordAtPosition(position);
if (!wordAtPosition) {
return {
word: '',
startColumn: position.column,
endColumn: position.column
};
}
return {
word: wordAtPosition.word.substr(0, position.column - wordAtPosition.startColumn),
startColumn: wordAtPosition.startColumn,
endColumn: position.column
};
return this._tokenizationTextModelPart.getWordUntilPosition(position);
}
//#endregion

View file

@ -3,12 +3,13 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { IDisposable } from 'vs/base/common/lifecycle';
import { Disposable } from 'vs/base/common/lifecycle';
export class TextModelPart implements IDisposable {
export class TextModelPart extends Disposable {
private _isDisposed = false;
public dispose(): void {
public override dispose(): void {
super.dispose();
this._isDisposed = true;
}
protected assertNotDisposed(): void {

View file

@ -18,6 +18,7 @@ import { ContiguousMultilineTokensBuilder } from 'vs/editor/common/tokens/contig
import { runWhenIdle, IdleDeadline } from 'vs/base/common/async';
import { setTimeout0 } from 'vs/base/common/platform';
import { IModelContentChangedEvent, IModelLanguageChangedEvent } from 'vs/editor/common/textModelEvents';
import { TokenizationTextModelPart } from 'vs/editor/common/model/tokenizationTextModelPart';
const enum Constants {
CHEAP_TOKENIZATION_LENGTH_LIMIT = 2048
@ -166,6 +167,7 @@ export class TextModelTokenization extends Disposable {
constructor(
private readonly _textModel: TextModel,
private readonly _tokenizationPart: TokenizationTextModelPart,
private readonly _languageIdCodec: ILanguageIdCodec
) {
super();
@ -179,7 +181,7 @@ export class TextModelTokenization extends Disposable {
}
this._resetTokenizationState();
this._textModel.clearTokens();
this._tokenizationPart.clearTokens();
}));
this._resetTokenizationState();
@ -214,13 +216,13 @@ export class TextModelTokenization extends Disposable {
public handleDidChangeLanguage(e: IModelLanguageChangedEvent): void {
this._resetTokenizationState();
this._textModel.clearTokens();
this._tokenizationPart.clearTokens();
}
//#endregion
private _resetTokenizationState(): void {
const [tokenizationSupport, initialState] = initializeTokenization(this._textModel);
const [tokenizationSupport, initialState] = initializeTokenization(this._textModel, this._tokenizationPart);
if (tokenizationSupport && initialState) {
this._tokenizationStateStore = new TokenizationStateStore(tokenizationSupport, initialState);
} else {
@ -294,24 +296,24 @@ export class TextModelTokenization extends Disposable {
}
} while (this._hasLinesToTokenize());
this._textModel.setTokens(builder.finalize(), this._isTokenizationComplete());
this._tokenizationPart.setTokens(builder.finalize(), this._isTokenizationComplete());
}
public tokenizeViewport(startLineNumber: number, endLineNumber: number): void {
const builder = new ContiguousMultilineTokensBuilder();
this._tokenizeViewport(builder, startLineNumber, endLineNumber);
this._textModel.setTokens(builder.finalize(), this._isTokenizationComplete());
this._tokenizationPart.setTokens(builder.finalize(), this._isTokenizationComplete());
}
public reset(): void {
this._resetTokenizationState();
this._textModel.clearTokens();
this._tokenizationPart.clearTokens();
}
public forceTokenization(lineNumber: number): void {
const builder = new ContiguousMultilineTokensBuilder();
this._updateTokensUntilLine(builder, lineNumber);
this._textModel.setTokens(builder.finalize(), this._isTokenizationComplete());
this._tokenizationPart.setTokens(builder.finalize(), this._isTokenizationComplete());
}
public getTokenTypeIfInsertingCharacter(position: Position, character: string): StandardTokenType {
@ -499,11 +501,11 @@ export class TextModelTokenization extends Disposable {
}
}
function initializeTokenization(textModel: TextModel): [ITokenizationSupport, IState] | [null, null] {
function initializeTokenization(textModel: TextModel, tokenizationPart: TokenizationTextModelPart): [ITokenizationSupport, IState] | [null, null] {
if (textModel.isTooLargeForTokenization()) {
return [null, null];
}
const tokenizationSupport = TokenizationRegistry.get(textModel.getLanguageId());
const tokenizationSupport = TokenizationRegistry.get(tokenizationPart.getLanguageId());
if (!tokenizationSupport) {
return [null, null];
}

View file

@ -0,0 +1,506 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Emitter, Event } from 'vs/base/common/event';
import { CharCode } from 'vs/base/common/charCode';
import { IDisposable } from 'vs/base/common/lifecycle';
import { IPosition, Position } from 'vs/editor/common/core/position';
import { IRange, Range } from 'vs/editor/common/core/range';
import { getWordAtText, IWordAtPosition } from 'vs/editor/common/core/wordHelper';
import { StandardTokenType } from 'vs/editor/common/languages';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { ILanguageConfigurationService, ResolvedLanguageConfiguration } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { TextModel } from 'vs/editor/common/model/textModel';
import { TextModelPart } from 'vs/editor/common/model/textModelPart';
import { TextModelTokenization } from 'vs/editor/common/model/textModelTokens';
import { IModelContentChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelTokensChangedEvent } from 'vs/editor/common/textModelEvents';
import { ContiguousMultilineTokens } from 'vs/editor/common/tokens/contiguousMultilineTokens';
import { ContiguousTokensStore } from 'vs/editor/common/tokens/contiguousTokensStore';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { SparseMultilineTokens } from 'vs/editor/common/tokens/sparseMultilineTokens';
import { SparseTokensStore } from 'vs/editor/common/tokens/sparseTokensStore';
import { BracketPairsTextModelPart } from 'vs/editor/common/model/bracketPairsTextModelPart/bracketPairsImpl';
import { BackgroundTokenizationState, ITokenizationTextModelPart } from 'vs/editor/common/tokenizationTextModelPart';
export class TokenizationTextModelPart extends TextModelPart implements ITokenizationTextModelPart {
private readonly _onDidChangeLanguage: Emitter<IModelLanguageChangedEvent> = this._register(new Emitter<IModelLanguageChangedEvent>());
public readonly onDidChangeLanguage: Event<IModelLanguageChangedEvent> = this._onDidChangeLanguage.event;
private readonly _onDidChangeLanguageConfiguration: Emitter<IModelLanguageConfigurationChangedEvent> = this._register(new Emitter<IModelLanguageConfigurationChangedEvent>());
public readonly onDidChangeLanguageConfiguration: Event<IModelLanguageConfigurationChangedEvent> = this._onDidChangeLanguageConfiguration.event;
private readonly _onDidChangeTokens: Emitter<IModelTokensChangedEvent> = this._register(new Emitter<IModelTokensChangedEvent>());
public readonly onDidChangeTokens: Event<IModelTokensChangedEvent> = this._onDidChangeTokens.event;
private readonly _languageRegistryListener: IDisposable;
private readonly _tokens: ContiguousTokensStore;
private readonly _semanticTokens: SparseTokensStore;
private readonly _tokenization: TextModelTokenization;
constructor(
private readonly _languageService: ILanguageService,
private readonly _languageConfigurationService: ILanguageConfigurationService,
private readonly _textModel: TextModel,
private readonly bracketPairsTextModelPart: BracketPairsTextModelPart,
private _languageId: string,
) {
super();
this._tokens = new ContiguousTokensStore(
this._languageService.languageIdCodec
);
this._semanticTokens = new SparseTokensStore(
this._languageService.languageIdCodec
);
this._tokenization = new TextModelTokenization(
_textModel,
this,
this._languageService.languageIdCodec
);
this._languageRegistryListener = this._languageConfigurationService.onDidChange(
e => {
if (e.affects(this._languageId)) {
this._onDidChangeLanguageConfiguration.fire({});
}
}
);
}
_hasListeners(): boolean {
return (
this._onDidChangeLanguage.hasListeners()
|| this._onDidChangeLanguageConfiguration.hasListeners()
|| this._onDidChangeTokens.hasListeners()
|| this._onBackgroundTokenizationStateChanged.hasListeners()
);
}
public acceptEdit(
range: IRange,
text: string,
eolCount: number,
firstLineLength: number,
lastLineLength: number
): void {
this._tokens.acceptEdit(range, eolCount, firstLineLength);
this._semanticTokens.acceptEdit(
range,
eolCount,
firstLineLength,
lastLineLength,
text.length > 0 ? text.charCodeAt(0) : CharCode.Null
);
}
public handleDidChangeAttached(): void {
this._tokenization.handleDidChangeAttached();
}
public flush(): void {
this._tokens.flush();
this._semanticTokens.flush();
}
public handleDidChangeContent(change: IModelContentChangedEvent): void {
this._tokenization.handleDidChangeContent(change);
}
public override dispose(): void {
this._languageRegistryListener.dispose();
this._tokenization.dispose();
super.dispose();
}
private _backgroundTokenizationState = BackgroundTokenizationState.Uninitialized;
public get backgroundTokenizationState(): BackgroundTokenizationState {
return this._backgroundTokenizationState;
}
private handleTokenizationProgress(completed: boolean) {
if (this._backgroundTokenizationState === BackgroundTokenizationState.Completed) {
// We already did a full tokenization and don't go back to progressing.
return;
}
const newState = completed ? BackgroundTokenizationState.Completed : BackgroundTokenizationState.InProgress;
if (this._backgroundTokenizationState !== newState) {
this._backgroundTokenizationState = newState;
this.bracketPairsTextModelPart.handleDidChangeBackgroundTokenizationState();
this._onBackgroundTokenizationStateChanged.fire();
}
}
private readonly _onBackgroundTokenizationStateChanged = this._register(new Emitter<void>());
public readonly onBackgroundTokenizationStateChanged: Event<void> = this._onBackgroundTokenizationStateChanged.event;
public setLineTokens(
lineNumber: number,
tokens: Uint32Array | ArrayBuffer | null
): void {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
tokens,
false
);
}
public setTokens(
tokens: ContiguousMultilineTokens[],
backgroundTokenizationCompleted: boolean = false
): void {
if (tokens.length !== 0) {
const ranges: { fromLineNumber: number; toLineNumber: number }[] = [];
for (let i = 0, len = tokens.length; i < len; i++) {
const element = tokens[i];
let minChangedLineNumber = 0;
let maxChangedLineNumber = 0;
let hasChange = false;
for (
let lineNumber = element.startLineNumber;
lineNumber <= element.endLineNumber;
lineNumber++
) {
if (hasChange) {
this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
element.getLineTokens(lineNumber),
false
);
maxChangedLineNumber = lineNumber;
} else {
const lineHasChange = this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
element.getLineTokens(lineNumber),
true
);
if (lineHasChange) {
hasChange = true;
minChangedLineNumber = lineNumber;
maxChangedLineNumber = lineNumber;
}
}
}
if (hasChange) {
ranges.push({
fromLineNumber: minChangedLineNumber,
toLineNumber: maxChangedLineNumber,
});
}
}
if (ranges.length > 0) {
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: false,
ranges: ranges,
});
}
}
this.handleTokenizationProgress(backgroundTokenizationCompleted);
}
public setSemanticTokens(
tokens: SparseMultilineTokens[] | null,
isComplete: boolean
): void {
this._semanticTokens.set(tokens, isComplete);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: tokens !== null,
ranges: [{ fromLineNumber: 1, toLineNumber: this._textModel.getLineCount() }],
});
}
public hasCompleteSemanticTokens(): boolean {
return this._semanticTokens.isComplete();
}
public hasSomeSemanticTokens(): boolean {
return !this._semanticTokens.isEmpty();
}
public setPartialSemanticTokens(
range: Range,
tokens: SparseMultilineTokens[]
): void {
if (this.hasCompleteSemanticTokens()) {
return;
}
const changedRange = this._textModel.validateRange(
this._semanticTokens.setPartial(range, tokens)
);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: true,
ranges: [
{
fromLineNumber: changedRange.startLineNumber,
toLineNumber: changedRange.endLineNumber,
},
],
});
}
public tokenizeViewport(
startLineNumber: number,
endLineNumber: number
): void {
startLineNumber = Math.max(1, startLineNumber);
endLineNumber = Math.min(this._textModel.getLineCount(), endLineNumber);
this._tokenization.tokenizeViewport(startLineNumber, endLineNumber);
}
public clearTokens(): void {
this._tokens.flush();
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: true,
semanticTokensApplied: false,
ranges: [
{
fromLineNumber: 1,
toLineNumber: this._textModel.getLineCount(),
},
],
});
}
public clearSemanticTokens(): void {
this._semanticTokens.flush();
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: false,
ranges: [{ fromLineNumber: 1, toLineNumber: this._textModel.getLineCount() }],
});
}
private _emitModelTokensChangedEvent(e: IModelTokensChangedEvent): void {
if (!this._textModel._isDisposing) {
this.bracketPairsTextModelPart.handleDidChangeTokens(e);
this._onDidChangeTokens.fire(e);
}
}
public resetTokenization(): void {
this._tokenization.reset();
}
public forceTokenization(lineNumber: number): void {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
this._tokenization.forceTokenization(lineNumber);
}
public isCheapToTokenize(lineNumber: number): boolean {
return this._tokenization.isCheapToTokenize(lineNumber);
}
public tokenizeIfCheap(lineNumber: number): void {
if (this.isCheapToTokenize(lineNumber)) {
this.forceTokenization(lineNumber);
}
}
public getLineTokens(lineNumber: number): LineTokens {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
return this._getLineTokens(lineNumber);
}
private _getLineTokens(lineNumber: number): LineTokens {
const lineText = this._textModel.getLineContent(lineNumber);
const syntacticTokens = this._tokens.getTokens(
this._languageId,
lineNumber - 1,
lineText
);
return this._semanticTokens.addSparseTokens(lineNumber, syntacticTokens);
}
public getTokenTypeIfInsertingCharacter(
lineNumber: number,
column: number,
character: string
): StandardTokenType {
const position = this._textModel.validatePosition(new Position(lineNumber, column));
return this._tokenization.getTokenTypeIfInsertingCharacter(
position,
character
);
}
public tokenizeLineWithEdit(
position: IPosition,
length: number,
newText: string
): LineTokens | null {
const validatedPosition = this._textModel.validatePosition(position);
return this._tokenization.tokenizeLineWithEdit(
validatedPosition,
length,
newText
);
}
private getLanguageConfiguration(
languageId: string
): ResolvedLanguageConfiguration {
return this._languageConfigurationService.getLanguageConfiguration(
languageId
);
}
// Having tokens allows implementing additional helper methods
public getWordAtPosition(_position: IPosition): IWordAtPosition | null {
this.assertNotDisposed();
const position = this._textModel.validatePosition(_position);
const lineContent = this._textModel.getLineContent(position.lineNumber);
const lineTokens = this._getLineTokens(position.lineNumber);
const tokenIndex = lineTokens.findTokenIndexAtOffset(position.column - 1);
// (1). First try checking right biased word
const [rbStartOffset, rbEndOffset] = TokenizationTextModelPart._findLanguageBoundaries(
lineTokens,
tokenIndex
);
const rightBiasedWord = getWordAtText(
position.column,
this.getLanguageConfiguration(
lineTokens.getLanguageId(tokenIndex)
).getWordDefinition(),
lineContent.substring(rbStartOffset, rbEndOffset),
rbStartOffset
);
// Make sure the result touches the original passed in position
if (
rightBiasedWord &&
rightBiasedWord.startColumn <= _position.column &&
_position.column <= rightBiasedWord.endColumn
) {
return rightBiasedWord;
}
// (2). Else, if we were at a language boundary, check the left biased word
if (tokenIndex > 0 && rbStartOffset === position.column - 1) {
// edge case, where `position` sits between two tokens belonging to two different languages
const [lbStartOffset, lbEndOffset] = TokenizationTextModelPart._findLanguageBoundaries(
lineTokens,
tokenIndex - 1
);
const leftBiasedWord = getWordAtText(
position.column,
this.getLanguageConfiguration(
lineTokens.getLanguageId(tokenIndex - 1)
).getWordDefinition(),
lineContent.substring(lbStartOffset, lbEndOffset),
lbStartOffset
);
// Make sure the result touches the original passed in position
if (
leftBiasedWord &&
leftBiasedWord.startColumn <= _position.column &&
_position.column <= leftBiasedWord.endColumn
) {
return leftBiasedWord;
}
}
return null;
}
/**
 * Computes the `[startOffset, endOffset]` character range covered by the
 * maximal run of consecutive tokens sharing the language of the token at
 * `tokenIndex`.
 */
private static _findLanguageBoundaries(lineTokens: LineTokens, tokenIndex: number): [number, number] {
	const languageId = lineTokens.getLanguageId(tokenIndex);

	// Walk left while the language stays the same.
	let startOffset = 0;
	let leftCursor = tokenIndex;
	while (leftCursor >= 0 && lineTokens.getLanguageId(leftCursor) === languageId) {
		startOffset = lineTokens.getStartOffset(leftCursor);
		leftCursor--;
	}

	// Walk right while the language stays the same.
	const tokenCount = lineTokens.getCount();
	let endOffset = lineTokens.getLineContent().length;
	let rightCursor = tokenIndex;
	while (rightCursor < tokenCount && lineTokens.getLanguageId(rightCursor) === languageId) {
		endOffset = lineTokens.getEndOffset(rightCursor);
		rightCursor++;
	}

	return [startOffset, endOffset];
}
/**
 * Returns the portion of the word at `position` that lies strictly before
 * the position (the prefix "typed so far"). When no word touches the
 * position, an empty word anchored at `position.column` is returned.
 */
public getWordUntilPosition(position: IPosition): IWordAtPosition {
	const wordAtPosition = this.getWordAtPosition(position);
	if (!wordAtPosition) {
		return {
			word: '',
			startColumn: position.column,
			endColumn: position.column,
		};
	}
	return {
		// `substring` instead of the deprecated `substr`; equivalent here
		// because the length is non-negative (the word touches `position`,
		// so position.column >= wordAtPosition.startColumn).
		word: wordAtPosition.word.substring(
			0,
			position.column - wordAtPosition.startColumn
		),
		startColumn: wordAtPosition.startColumn,
		endColumn: position.column,
	};
}
/**
 * Gets the language id currently associated with this text model.
 */
public getLanguageId(): string {
	return this._languageId;
}
/**
 * Resolves the language id at a concrete (lineNumber, column) location by
 * looking at the token covering that position on the line.
 */
public getLanguageIdAtPosition(lineNumber: number, column: number): string {
	const validatedPosition = this._textModel.validatePosition(new Position(lineNumber, column));
	const tokens = this.getLineTokens(validatedPosition.lineNumber);
	const tokenIndex = tokens.findTokenIndexAtOffset(validatedPosition.column - 1);
	return tokens.getLanguageId(tokenIndex);
}
/**
 * Switches the model to a different language. No-ops when the language is
 * unchanged; otherwise records the new id, notifies the bracket-pairs and
 * tokenization parts, and then fires the public change events.
 */
public setLanguageId(languageId: string): void {
	if (this._languageId === languageId) {
		// Nothing changed — avoid firing spurious events.
		return;
	}

	const event: IModelLanguageChangedEvent = {
		oldLanguage: this._languageId,
		newLanguage: languageId
	};
	this._languageId = languageId;

	// Internal parts are notified before the public events fire.
	this.bracketPairsTextModelPart.handleDidChangeLanguage(event);
	this._tokenization.handleDidChangeLanguage(event);
	this._onDidChangeLanguage.fire(event);
	this._onDidChangeLanguageConfiguration.fire({});
}
}

View file

@ -850,7 +850,7 @@ export class ModelSemanticColoring extends Disposable {
// there is no provider
if (this._currentDocumentResponse) {
// there are semantic tokens set
this._model.setSemanticTokens(null, false);
this._model.tokenization.setSemanticTokens(null, false);
}
return;
}
@ -925,11 +925,11 @@ export class ModelSemanticColoring extends Disposable {
return;
}
if (!provider || !styling) {
this._model.setSemanticTokens(null, false);
this._model.tokenization.setSemanticTokens(null, false);
return;
}
if (!tokens) {
this._model.setSemanticTokens(null, true);
this._model.tokenization.setSemanticTokens(null, true);
rescheduleIfNeeded();
return;
}
@ -937,7 +937,7 @@ export class ModelSemanticColoring extends Disposable {
if (isSemanticTokensEdits(tokens)) {
if (!currentResponse) {
// not possible!
this._model.setSemanticTokens(null, true);
this._model.tokenization.setSemanticTokens(null, true);
return;
}
if (tokens.edits.length === 0) {
@ -1006,9 +1006,9 @@ export class ModelSemanticColoring extends Disposable {
}
}
this._model.setSemanticTokens(result, true);
this._model.tokenization.setSemanticTokens(result, true);
} else {
this._model.setSemanticTokens(null, true);
this._model.tokenization.setSemanticTokens(null, true);
}
rescheduleIfNeeded();

View file

@ -0,0 +1,106 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Event } from 'vs/base/common/event';
import { IPosition } from 'vs/editor/common/core/position';
import { Range } from 'vs/editor/common/core/range';
import { StandardTokenType } from 'vs/editor/common/languages';
import { ContiguousMultilineTokens } from 'vs/editor/common/tokens/contiguousMultilineTokens';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { SparseMultilineTokens } from 'vs/editor/common/tokens/sparseMultilineTokens';
/**
 * Provides tokenization related functionality of the text model.
 */
export interface ITokenizationTextModelPart {
	/**
	 * Sets (grammar) tokens for a set of lines.
	 * @internal
	 */
	setTokens(tokens: ContiguousMultilineTokens[]): void;
	/**
	 * Replaces the semantic tokens of the model; `null` clears them.
	 * @internal
	 */
	setSemanticTokens(tokens: SparseMultilineTokens[] | null, isComplete: boolean): void;
	/**
	 * Replaces the semantic tokens inside `range` only; `null` clears them there.
	 * @internal
	 */
	setPartialSemanticTokens(range: Range, tokens: SparseMultilineTokens[] | null): void;
	/**
	 * Whether semantic tokens covering the whole model are present.
	 * @internal
	 */
	hasCompleteSemanticTokens(): boolean;
	/**
	 * Whether any semantic tokens (complete or partial) are present.
	 * @internal
	 */
	hasSomeSemanticTokens(): boolean;
	/**
	 * Flush all tokenization state.
	 * @internal
	 */
	resetTokenization(): void;
	/**
	 * Force tokenization information for `lineNumber` to be accurate.
	 * @internal
	 */
	forceTokenization(lineNumber: number): void;
	/**
	 * If it is cheap, force tokenization information for `lineNumber` to be accurate.
	 * This is based on a heuristic.
	 * @internal
	 */
	tokenizeIfCheap(lineNumber: number): void;
	/**
	 * Check if calling `forceTokenization` for this `lineNumber` will be cheap (time-wise).
	 * This is based on a heuristic.
	 * @internal
	 */
	isCheapToTokenize(lineNumber: number): boolean;
	/**
	 * Get the tokens for the line `lineNumber`.
	 * The tokens might be inaccurate. Use `forceTokenization` to ensure accurate tokens.
	 * @internal
	 */
	getLineTokens(lineNumber: number): LineTokens;
	/**
	 * Returns the standard token type for a character if the character were to be inserted at
	 * the given position. If the result cannot be accurate, it returns null.
	 * @internal
	 */
	getTokenTypeIfInsertingCharacter(lineNumber: number, column: number, character: string): StandardTokenType;
	/**
	 * Tokenizes the line at `position` as if `length` characters starting
	 * there were replaced by `newText`; returns `null` when this cannot be
	 * computed.
	 * @internal
	 */
	tokenizeLineWithEdit(position: IPosition, length: number, newText: string): LineTokens | null;
	/**
	 * Requests tokenization of the line range `[startLineNumber, endLineNumber]`
	 * (e.g. the currently visible viewport).
	 * @internal
	 */
	tokenizeViewport(startLineNumber: number, endLineNumber: number): void;

	/** Gets the language id of the text model. */
	getLanguageId(): string;
	/** Gets the language id at a given position (lines can host embedded languages). */
	getLanguageIdAtPosition(lineNumber: number, column: number): string;
	/** Changes the language of the text model. */
	setLanguageId(languageId: string): void;

	/** Current progress of background tokenization. */
	readonly backgroundTokenizationState: BackgroundTokenizationState;
	/** Fires when `backgroundTokenizationState` changes. */
	readonly onBackgroundTokenizationStateChanged: Event<void>;
}
/**
 * Progress states of the background tokenization of a text model.
 */
export const enum BackgroundTokenizationState {
	// Background tokenization has not been set up yet.
	Uninitialized = 0,
	// Background tokenization is running.
	InProgress = 1,
	// Background tokenization has finished.
	Completed = 2,
}

View file

@ -37,7 +37,9 @@ export interface IModelLineProjection {
}
export interface ISimpleModel {
getLineTokens(lineNumber: number): LineTokens;
tokenization: {
getLineTokens(lineNumber: number): LineTokens;
};
getLineContent(lineNumber: number): string;
getLineLength(lineNumber: number): number;
getLineMinColumn(lineNumber: number): number;
@ -211,13 +213,13 @@ class ModelLineProjection implements IModelLineProjection {
let lineWithInjections: LineTokens;
if (injectionOffsets) {
lineWithInjections = model.getLineTokens(modelLineNumber).withInserted(injectionOffsets.map((offset, idx) => ({
lineWithInjections = model.tokenization.getLineTokens(modelLineNumber).withInserted(injectionOffsets.map((offset, idx) => ({
offset,
text: injectionOptions![idx].content,
tokenMetadata: LineTokens.defaultTokenMetadata
})));
} else {
lineWithInjections = model.getLineTokens(modelLineNumber);
lineWithInjections = model.tokenization.getLineTokens(modelLineNumber);
}
for (let outputLineIndex = outputLineIdx; outputLineIndex < outputLineIdx + lineCount; outputLineIndex++) {
@ -339,7 +341,7 @@ class IdentityModelLineProjection implements IModelLineProjection {
}
public getViewLineData(model: ISimpleModel, modelLineNumber: number, _outputLineIndex: number): ViewLineData {
const lineTokens = model.getLineTokens(modelLineNumber);
const lineTokens = model.tokenization.getLineTokens(modelLineNumber);
const lineContent = lineTokens.getLineContent();
return new ViewLineData(
lineContent,

View file

@ -204,7 +204,7 @@ export function isModelDecorationInString(model: ITextModel, decoration: IModelD
*/
function testTokensInRange(model: ITextModel, range: Range, callback: (tokenType: StandardTokenType) => boolean): boolean {
for (let lineNumber = range.startLineNumber; lineNumber <= range.endLineNumber; lineNumber++) {
const lineTokens = model.getLineTokens(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
const isFirstLine = lineNumber === range.startLineNumber;
const isEndLine = lineNumber === range.endLineNumber;

View file

@ -193,7 +193,7 @@ export class ViewModel extends Disposable implements IViewModel {
const modelVisibleRanges = this._toModelVisibleRanges(viewVisibleRange);
for (const modelVisibleRange of modelVisibleRanges) {
this.model.tokenizeViewport(modelVisibleRange.startLineNumber, modelVisibleRange.endLineNumber);
this.model.tokenization.tokenizeViewport(modelVisibleRange.startLineNumber, modelVisibleRange.endLineNumber);
}
}
@ -914,7 +914,7 @@ export class ViewModel extends Disposable implements IViewModel {
let result = '';
for (let lineNumber = startLineNumber; lineNumber <= endLineNumber; lineNumber++) {
const lineTokens = this.model.getLineTokens(lineNumber);
const lineTokens = this.model.tokenization.getLineTokens(lineNumber);
const lineContent = lineTokens.getLineContent();
const startOffset = (lineNumber === startLineNumber ? startColumn - 1 : 0);
const endOffset = (lineNumber === endLineNumber ? endColumn - 1 : lineContent.length);

View file

@ -1200,7 +1200,7 @@ export class ViewModelLinesFromModelAsIs implements IViewModelLines {
}
public getViewLineData(viewLineNumber: number): ViewLineData {
const lineTokens = this.model.getLineTokens(viewLineNumber);
const lineTokens = this.model.tokenization.getLineTokens(viewLineNumber);
const lineContent = lineTokens.getLineContent();
return new ViewLineData(
lineContent,

View file

@ -170,7 +170,7 @@ export class BlockCommentCommand implements ICommand {
const startLineNumber = this._selection.startLineNumber;
const startColumn = this._selection.startColumn;
model.tokenizeIfCheap(startLineNumber);
model.tokenization.tokenizeIfCheap(startLineNumber);
const languageId = model.getLanguageIdAtPosition(startLineNumber, startColumn);
const config = this.languageConfigurationService.getLanguageConfiguration(languageId).comments;
if (!config || !config.blockCommentStartToken || !config.blockCommentEndToken) {

View file

@ -85,7 +85,7 @@ export class LineCommentCommand implements ICommand {
*/
private static _gatherPreflightCommentStrings(model: ITextModel, startLineNumber: number, endLineNumber: number, languageConfigurationService: ILanguageConfigurationService): ILinePreflightData[] | null {
model.tokenizeIfCheap(startLineNumber);
model.tokenization.tokenizeIfCheap(startLineNumber);
const languageId = model.getLanguageIdAtPosition(startLineNumber, 1);
const config = languageConfigurationService.getLanguageConfiguration(languageId).comments;
@ -282,7 +282,7 @@ export class LineCommentCommand implements ICommand {
* Given an unsuccessful analysis, delegate to the block comment command
*/
private _executeBlockComment(model: ITextModel, builder: IEditOperationBuilder, s: Selection): void {
model.tokenizeIfCheap(s.startLineNumber);
model.tokenization.tokenizeIfCheap(s.startLineNumber);
let languageId = model.getLanguageIdAtPosition(s.startLineNumber, 1);
const config = this.languageConfigurationService.getLanguageConfiguration(languageId).comments;
if (!config || !config.blockCommentStartToken || !config.blockCommentEndToken) {

View file

@ -473,7 +473,7 @@ export class AutoIndentOnPaste implements IEditorContribution {
return;
}
if (!model.isCheapToTokenize(range.getStartPosition().lineNumber)) {
if (!model.tokenization.isCheapToTokenize(range.getStartPosition().lineNumber)) {
return;
}
const autoIndent = this.editor.getOption(EditorOption.autoIndent);
@ -546,14 +546,16 @@ export class AutoIndentOnPaste implements IEditorContribution {
if (startLineNumber !== range.endLineNumber) {
let virtualModel = {
getLineTokens: (lineNumber: number) => {
return model.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
tokenization: {
getLineTokens: (lineNumber: number) => {
return model.tokenization.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
},
},
getLineContent: (lineNumber: number) => {
if (lineNumber === firstLineNumber) {
@ -597,12 +599,12 @@ export class AutoIndentOnPaste implements IEditorContribution {
}
private shouldIgnoreLine(model: ITextModel, lineNumber: number): boolean {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
let nonWhitespaceColumn = model.getLineFirstNonWhitespaceColumn(lineNumber);
if (nonWhitespaceColumn === 0) {
return true;
}
let tokens = model.getLineTokens(lineNumber);
let tokens = model.tokenization.getLineTokens(lineNumber);
if (tokens.getCount() > 0) {
let firstNonWhitespaceTokenIndex = tokens.findTokenIndexAtOffset(nonWhitespaceColumn);
if (firstNonWhitespaceTokenIndex >= 0 && tokens.getStandardTokenType(firstNonWhitespaceTokenIndex) === StandardTokenType.Comment) {

View file

@ -139,8 +139,8 @@ export class InlayHintsFragments {
return new Range(line, word.startColumn, line, word.endColumn);
}
model.tokenizeIfCheap(line);
const tokens = model.getLineTokens(line);
model.tokenization.tokenizeIfCheap(line);
const tokens = model.tokenization.getLineTokens(line);
const offset = position.column - 1;
const idx = tokens.findTokenIndexAtOffset(offset);

View file

@ -784,7 +784,7 @@ function closeBrackets(text: string, position: Position, model: ITextModel, lang
const lineStart = model.getLineContent(position.lineNumber).substring(0, position.column - 1);
const newLine = lineStart + text;
const newTokens = model.tokenizeLineWithEdit(position, newLine.length - (position.column - 1), text);
const newTokens = model.tokenization.tokenizeLineWithEdit(position, newLine.length - (position.column - 1), text);
const slicedTokens = newTokens?.sliceAndInflate(position.column - 1, newLine.length, 0);
if (!slicedTokens) {
return text;

View file

@ -14,7 +14,7 @@ import { CompleteEnterAction, IndentAction } from 'vs/editor/common/languages/la
import { ILanguageConfigurationService } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { IndentConsts } from 'vs/editor/common/languages/supports/indentRules';
import * as indentUtils from 'vs/editor/contrib/indentation/browser/indentUtils';
import { getGoodIndentForLine, getIndentMetadata, IIndentConverter } from 'vs/editor/common/languages/autoIndent';
import { getGoodIndentForLine, getIndentMetadata, IIndentConverter, IVirtualModel } from 'vs/editor/common/languages/autoIndent';
import { getEnterAction } from 'vs/editor/common/languages/enterAction';
export class MoveLinesCommand implements ICommand {
@ -63,15 +63,17 @@ export class MoveLinesCommand implements ICommand {
const { tabSize, indentSize, insertSpaces } = model.getOptions();
let indentConverter = this.buildIndentConverter(tabSize, indentSize, insertSpaces);
let virtualModel = {
getLineTokens: (lineNumber: number) => {
return model.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
let virtualModel: IVirtualModel = {
tokenization: {
getLineTokens: (lineNumber: number) => {
return model.tokenization.getLineTokens(lineNumber);
},
getLanguageId: () => {
return model.getLanguageId();
},
getLanguageIdAtPosition: (lineNumber: number, column: number) => {
return model.getLanguageIdAtPosition(lineNumber, column);
},
},
getLineContent: null as unknown as (lineNumber: number) => string,
};
@ -361,7 +363,7 @@ export class MoveLinesCommand implements ICommand {
return false;
}
// if it's not easy to tokenize, we stop auto indent.
if (!model.isCheapToTokenize(selection.startLineNumber)) {
if (!model.tokenization.isCheapToTokenize(selection.startLineNumber)) {
return false;
}
let languageAtSelectionStart = model.getLanguageIdAtPosition(selection.startLineNumber, 1);

View file

@ -120,8 +120,8 @@ class SuggestInlineCompletions implements InlineCompletionsProvider<InlineComple
return;
}
model.tokenizeIfCheap(position.lineNumber);
const lineTokens = model.getLineTokens(position.lineNumber);
model.tokenization.tokenizeIfCheap(position.lineNumber);
const lineTokens = model.tokenization.getLineTokens(position.lineNumber);
const tokenType = lineTokens.getStandardTokenType(lineTokens.findTokenIndexAtOffset(Math.max(position.column - 1 - 1, 0)));
if (QuickSuggestionsOptions.valueFor(config, tokenType) !== 'inline') {
// quick suggest is off (for this token)

View file

@ -61,7 +61,7 @@ export class LineContext {
}
const model = editor.getModel();
const pos = editor.getPosition();
model.tokenizeIfCheap(pos.lineNumber);
model.tokenization.tokenizeIfCheap(pos.lineNumber);
const word = model.getWordAtPosition(pos);
if (!word) {
@ -408,8 +408,8 @@ export class SuggestModel implements IDisposable {
if (!QuickSuggestionsOptions.isAllOn(config)) {
// Check the type of the token that triggered this
model.tokenizeIfCheap(pos.lineNumber);
const lineTokens = model.getLineTokens(pos.lineNumber);
model.tokenization.tokenizeIfCheap(pos.lineNumber);
const lineTokens = model.tokenization.getLineTokens(pos.lineNumber);
const tokenType = lineTokens.getStandardTokenType(lineTokens.findTokenIndexAtOffset(Math.max(pos.column - 1 - 1, 0)));
if (QuickSuggestionsOptions.valueFor(config, tokenType) !== 'on') {
return;

View file

@ -23,9 +23,9 @@ class ForceRetokenizeAction extends EditorAction {
return;
}
const model = editor.getModel();
model.resetTokenization();
model.tokenization.resetTokenization();
const sw = new StopWatch(true);
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
sw.stop();
console.log(`tokenization took ${sw.elapsed()}`);

View file

@ -103,18 +103,18 @@ class ViewportSemanticTokensContribution extends Disposable implements IEditorCo
return;
}
const model = this._editor.getModel();
if (model.hasCompleteSemanticTokens()) {
if (model.tokenization.hasCompleteSemanticTokens()) {
return;
}
if (!isSemanticColoringEnabled(model, this._themeService, this._configurationService)) {
if (model.hasSomeSemanticTokens()) {
model.setSemanticTokens(null, false);
if (model.tokenization.hasSomeSemanticTokens()) {
model.tokenization.setSemanticTokens(null, false);
}
return;
}
if (!hasDocumentRangeSemanticTokensProvider(this._provider, model)) {
if (model.hasSomeSemanticTokens()) {
model.setSemanticTokens(null, false);
if (model.tokenization.hasSomeSemanticTokens()) {
model.tokenization.setSemanticTokens(null, false);
}
return;
}
@ -134,7 +134,7 @@ class ViewportSemanticTokensContribution extends Disposable implements IEditorCo
}
const { provider, tokens: result } = r;
const styling = this._modelService.getSemanticTokensProviderStyling(provider);
model.setPartialSemanticTokens(range, toMultilineTokens2(result, styling, model.getLanguageId()));
model.tokenization.setPartialSemanticTokens(range, toMultilineTokens2(result, styling, model.getLanguageId()));
}).then(() => this._removeOutstandingRequest(request), () => this._removeOutstandingRequest(request));
return request;
}

View file

@ -99,8 +99,8 @@ export class Colorizer {
public static colorizeModelLine(model: ITextModel, lineNumber: number, tabSize: number = 4): string {
const content = model.getLineContent(lineNumber);
model.forceTokenization(lineNumber);
const tokens = model.getLineTokens(lineNumber);
model.tokenization.forceTokenization(lineNumber);
const tokens = model.tokenization.getLineTokens(lineNumber);
const inflatedTokens = tokens.inflate();
return this.colorizeLine(content, model.mightContainNonBasicASCII(), model.mightContainRTL(), inflatedTokens, tabSize);
}

View file

@ -2751,7 +2751,7 @@ suite('Editor Controller', () => {
withTestCodeEditor(model, {}, (editor2, cursor2) => {
editor1.onDidChangeCursorPosition(() => {
model.tokenizeIfCheap(1);
model.tokenization.tokenizeIfCheap(1);
});
model.applyEdits([{ range: new Range(1, 1, 1, 1), text: '-' }]);
@ -3680,7 +3680,7 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(1, 12, 1, 12));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(2, 2, 2, 2));
moveTo(editor, viewModel, 3, 13, false);
@ -3743,7 +3743,7 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(2, 14, 2, 14));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(3, 1, 3, 1));
moveTo(editor, viewModel, 5, 16, false);
@ -3771,7 +3771,7 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(2, 11, 2, 11));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(3, 3, 3, 3));
viewModel.type('console.log();', 'keyboard');
@ -3856,7 +3856,7 @@ suite('Editor Controller', () => {
viewModel.type('\n', 'keyboard');
assertCursor(viewModel, new Selection(2, 5, 2, 5));
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
moveTo(editor, viewModel, 3, 13, false);
assertCursor(viewModel, new Selection(3, 13, 3, 13));
@ -3878,7 +3878,7 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(1, 12, 1, 12));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(2, 5, 2, 5));
moveTo(editor, viewModel, 3, 16, false);
@ -3903,7 +3903,7 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(1, 12, 1, 12));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(2, 2, 2, 2));
moveTo(editor, viewModel, 3, 16, false);
@ -4614,13 +4614,13 @@ suite('Editor Controller', () => {
assertCursor(viewModel, new Selection(1, 9, 1, 9));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(2, 2, 2, 2));
moveTo(editor, viewModel, 1, 9, false);
assertCursor(viewModel, new Selection(1, 9, 1, 9));
viewModel.type('\n', 'keyboard');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertCursor(viewModel, new Selection(2, 2, 2, 2));
});
});
@ -4931,7 +4931,7 @@ suite('Editor Controller', () => {
text: ['const markup = highlight'],
languageId: autoClosingLanguageId
}, (editor, model, viewModel) => {
model.forceTokenization(1);
model.tokenization.forceTokenization(1);
assertType(editor, model, viewModel, 1, 25, '`', '``', `auto closes \` @ (1, 25)`);
});
});
@ -4944,7 +4944,7 @@ suite('Editor Controller', () => {
{},
(editor, viewModel) => {
const model = viewModel.model;
model.forceTokenization(1);
model.tokenization.forceTokenization(1);
assertType(editor, model, viewModel, 1, 28, '`', '`', `does not auto close \` @ (1, 28)`);
}
);
@ -4980,7 +4980,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '(', '()', `auto closes @ (${lineNumber}, ${column})`);
} else {
@ -5024,7 +5024,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '(', '()', `auto closes @ (${lineNumber}, ${column})`);
} else {
@ -5055,7 +5055,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '(', '()', `auto closes @ (${lineNumber}, ${column})`);
} else {
@ -5085,7 +5085,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '\'', '\'\'', `auto closes @ (${lineNumber}, ${column})`);
} else {
@ -5131,7 +5131,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '(', '()', `auto closes @ (${lineNumber}, ${column})`);
} else {
@ -5176,7 +5176,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '(', '()', `auto closes @ (${lineNumber}, ${column})`);
assertType(editor, model, viewModel, lineNumber, column, '"', '""', `auto closes @ (${lineNumber}, ${column})`);
@ -5310,7 +5310,7 @@ suite('Editor Controller', () => {
const autoCloseColumns = extractAutoClosingSpecialColumns(model.getLineMaxColumn(lineNumber), autoClosePositions[i]);
for (let column = 1; column < autoCloseColumns.length; column++) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
if (autoCloseColumns[column] === AutoClosingColumnType.Special1) {
assertType(editor, model, viewModel, lineNumber, column, '\'', '\'\'', `auto closes @ (${lineNumber}, ${column})`);
} else if (autoCloseColumns[column] === AutoClosingColumnType.Special2) {
@ -5408,15 +5408,15 @@ suite('Editor Controller', () => {
],
languageId: languageId
}, (editor, model, viewModel) => {
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertType(editor, model, viewModel, 1, 4, '"', '"', `does not double quote when ending with open`);
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertType(editor, model, viewModel, 2, 4, '"', '"', `does not double quote when ending with open`);
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertType(editor, model, viewModel, 3, 4, '"', '"', `does not double quote when ending with open`);
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertType(editor, model, viewModel, 4, 2, '"', '"', `does not double quote when ending with open`);
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
assertType(editor, model, viewModel, 4, 3, '"', '"', `does not double quote when ending with open`);
});
});
@ -5447,50 +5447,50 @@ suite('Editor Controller', () => {
}
// First gif
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste1 = teste\' ok');
assert.strictEqual(model.getLineContent(1), 'teste1 = teste\' ok');
viewModel.setSelections('test', [new Selection(1, 1000, 1, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste2 = teste \'ok');
assert.strictEqual(model.getLineContent(2), 'teste2 = teste \'ok\'');
viewModel.setSelections('test', [new Selection(2, 1000, 2, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste3 = teste" ok');
assert.strictEqual(model.getLineContent(3), 'teste3 = teste" ok');
viewModel.setSelections('test', [new Selection(3, 1000, 3, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste4 = teste "ok');
assert.strictEqual(model.getLineContent(4), 'teste4 = teste "ok"');
// Second gif
viewModel.setSelections('test', [new Selection(4, 1000, 4, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste \'');
assert.strictEqual(model.getLineContent(5), 'teste \'\'');
viewModel.setSelections('test', [new Selection(5, 1000, 5, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste "');
assert.strictEqual(model.getLineContent(6), 'teste ""');
viewModel.setSelections('test', [new Selection(6, 1000, 6, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste\'');
assert.strictEqual(model.getLineContent(7), 'teste\'');
viewModel.setSelections('test', [new Selection(7, 1000, 7, 1000)]);
typeCharacters(viewModel, '\n');
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
typeCharacters(viewModel, 'teste"');
assert.strictEqual(model.getLineContent(8), 'teste"');
});

View file

@ -34,7 +34,7 @@ export function testCommand(
const viewModel = editor.getViewModel()!;
if (forceTokenization) {
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
}
viewModel.setSelections('tests', [selection]);

View file

@ -352,7 +352,7 @@ suite('SplitLinesCollection', () => {
languageRegistration = languages.TokenizationRegistry.register(LANGUAGE_ID, tokenizationSupport);
model = createTextModel(_text.join('\n'), LANGUAGE_ID);
// force tokenization
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
});
teardown(() => {
@ -988,8 +988,10 @@ function createLineBreakData(breakingLengths: number[], breakingOffsetsVisibleCo
function createModel(text: string): ISimpleModel {
return {
getLineTokens: (lineNumber: number) => {
return null!;
tokenization: {
getLineTokens: (lineNumber: number) => {
return null!;
},
},
getLineContent: (lineNumber: number) => {
return text;

View file

@ -40,7 +40,7 @@ suite('Bracket Pair Colorizer - Tokenizer', () => {
}));
const model = disposableStore.add(instantiateTextModel(instantiationService, document.getText(), mode1));
model.forceTokenization(model.getLineCount());
model.tokenization.forceTokenization(model.getLineCount());
const brackets = new LanguageAgnosticBracketTokens(denseKeyProvider, l => languageConfigurationService.getLanguageConfiguration(l));

View file

@ -125,7 +125,7 @@ suite('ModelLinesTokens', () => {
for (let lineIndex = 0; lineIndex < expected.length; lineIndex++) {
const actualLine = model.getLineContent(lineIndex + 1);
const actualTokens = model.getLineTokens(lineIndex + 1);
const actualTokens = model.tokenization.getLineTokens(lineIndex + 1);
assert.strictEqual(actualLine, expected[lineIndex].text);
assertLineTokens(actualTokens, expected[lineIndex].tokens);
}
@ -462,7 +462,7 @@ suite('ModelLinesTokens', () => {
text: 'a'
}]);
const actualTokens = model.getLineTokens(1);
const actualTokens = model.tokenization.getLineTokens(1);
assertLineTokens(actualTokens, [new TestToken(0, 1)]);
model.dispose();

View file

@ -56,98 +56,98 @@ suite('Editor Model - Model Modes 1', () => {
});
test('model calls syntax highlighter 1', () => {
thisModel.forceTokenization(1);
thisModel.tokenization.forceTokenization(1);
checkAndClear(['1']);
});
test('model calls syntax highlighter 2', () => {
thisModel.forceTokenization(2);
thisModel.tokenization.forceTokenization(2);
checkAndClear(['1', '2']);
thisModel.forceTokenization(2);
thisModel.tokenization.forceTokenization(2);
checkAndClear([]);
});
test('model caches states', () => {
thisModel.forceTokenization(1);
thisModel.tokenization.forceTokenization(1);
checkAndClear(['1']);
thisModel.forceTokenization(2);
thisModel.tokenization.forceTokenization(2);
checkAndClear(['2']);
thisModel.forceTokenization(3);
thisModel.tokenization.forceTokenization(3);
checkAndClear(['3']);
thisModel.forceTokenization(4);
thisModel.tokenization.forceTokenization(4);
checkAndClear(['4']);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['5']);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear([]);
});
test('model invalidates states for one line insert', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', '2', '3', '4', '5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 1), '-')]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['-']);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear([]);
});
test('model invalidates states for many lines insert', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', '2', '3', '4', '5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 1), '0\n-\n+')]);
assert.strictEqual(thisModel.getLineCount(), 7);
thisModel.forceTokenization(7);
thisModel.tokenization.forceTokenization(7);
checkAndClear(['0', '-', '+']);
thisModel.forceTokenization(7);
thisModel.tokenization.forceTokenization(7);
checkAndClear([]);
});
test('model invalidates states for one new line', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', '2', '3', '4', '5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 2), '\n')]);
thisModel.applyEdits([EditOperation.insert(new Position(2, 1), 'a')]);
thisModel.forceTokenization(6);
thisModel.tokenization.forceTokenization(6);
checkAndClear(['1', 'a']);
});
test('model invalidates states for one line delete', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', '2', '3', '4', '5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 2), '-')]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1']);
thisModel.applyEdits([EditOperation.delete(new Range(1, 1, 1, 2))]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['-']);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear([]);
});
test('model invalidates states for many lines delete', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', '2', '3', '4', '5']);
thisModel.applyEdits([EditOperation.delete(new Range(1, 1, 3, 1))]);
thisModel.forceTokenization(3);
thisModel.tokenization.forceTokenization(3);
checkAndClear(['3']);
thisModel.forceTokenization(3);
thisModel.tokenization.forceTokenization(3);
checkAndClear([]);
});
});
@ -208,55 +208,55 @@ suite('Editor Model - Model Modes 2', () => {
});
test('getTokensForInvalidLines one text insert', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 6), '-')]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1-', 'Line2']);
});
test('getTokensForInvalidLines two text insert', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([
EditOperation.insert(new Position(1, 6), '-'),
EditOperation.insert(new Position(3, 6), '-')
]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1-', 'Line2', 'Line3-', 'Line4']);
});
test('getTokensForInvalidLines one multi-line text insert, one small text insert', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([EditOperation.insert(new Position(1, 6), '\nNew line\nAnother new line')]);
thisModel.applyEdits([EditOperation.insert(new Position(5, 6), '-')]);
thisModel.forceTokenization(7);
thisModel.tokenization.forceTokenization(7);
checkAndClear(['Line1', 'New line', 'Another new line', 'Line2', 'Line3-', 'Line4']);
});
test('getTokensForInvalidLines one delete text', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([EditOperation.delete(new Range(1, 1, 1, 5))]);
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['1', 'Line2']);
});
test('getTokensForInvalidLines one line delete text', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([EditOperation.delete(new Range(1, 1, 2, 1))]);
thisModel.forceTokenization(4);
thisModel.tokenization.forceTokenization(4);
checkAndClear(['Line2']);
});
test('getTokensForInvalidLines multiple lines delete text', () => {
thisModel.forceTokenization(5);
thisModel.tokenization.forceTokenization(5);
checkAndClear(['Line1', 'Line2', 'Line3', 'Line4', 'Line5']);
thisModel.applyEdits([EditOperation.delete(new Range(1, 1, 3, 3))]);
thisModel.forceTokenization(3);
thisModel.tokenization.forceTokenization(3);
checkAndClear(['ne3', 'Line4']);
});
});

View file

@ -438,9 +438,9 @@ suite('TextModelWithTokens', () => {
mode1
));
model.forceTokenization(1);
model.forceTokenization(2);
model.forceTokenization(3);
model.tokenization.forceTokenization(1);
model.tokenization.forceTokenization(2);
model.tokenization.forceTokenization(3);
assert.deepStrictEqual(model.bracketPairs.matchBracket(new Position(2, 14)), [new Range(2, 13, 2, 14), new Range(2, 18, 2, 19)]);
@ -517,9 +517,9 @@ suite('TextModelWithTokens', () => {
mode
));
model.forceTokenization(1);
model.forceTokenization(2);
model.forceTokenization(3);
model.tokenization.forceTokenization(1);
model.tokenization.forceTokenization(2);
model.tokenization.forceTokenization(3);
assert.deepStrictEqual(model.bracketPairs.matchBracket(new Position(2, 23)), null);
assert.deepStrictEqual(model.bracketPairs.matchBracket(new Position(2, 20)), null);
@ -534,9 +534,9 @@ suite('TextModelWithTokens regression tests', () => {
test('microsoft/monaco-editor#122: Unhandled Exception: TypeError: Unable to get property \'replace\' of undefined or null reference', () => {
function assertViewLineTokens(model: TextModel, lineNumber: number, forceTokenization: boolean, expected: TestLineToken[]): void {
if (forceTokenization) {
model.forceTokenization(lineNumber);
model.tokenization.forceTokenization(lineNumber);
}
let _actual = model.getLineTokens(lineNumber).inflate();
let _actual = model.tokenization.getLineTokens(lineNumber).inflate();
interface ISimpleViewToken {
endIndex: number;
foreground: number;
@ -688,7 +688,7 @@ suite('TextModelWithTokens regression tests', () => {
const model = disposables.add(instantiateTextModel(instantiationService, 'A model with one line', outerMode));
model.forceTokenization(1);
model.tokenization.forceTokenization(1);
assert.strictEqual(model.getLanguageIdAtPosition(1, 1), innerMode);
disposables.dispose();

View file

@ -74,7 +74,7 @@ suite('TokensStore', () => {
function extractState(model: TextModel): string[] {
let result: string[] = [];
for (let lineNumber = 1; lineNumber <= model.getLineCount(); lineNumber++) {
const lineTokens = model.getLineTokens(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
const lineContent = model.getLineContent(lineNumber);
let lineText = '';
@ -101,7 +101,7 @@ suite('TokensStore', () => {
function testTokensAdjustment(rawInitialState: string[], edits: ISingleEditOperation[], rawFinalState: string[]) {
const initialState = parseTokensState(rawInitialState);
const model = createTextModel(initialState.text);
model.setSemanticTokens([initialState.tokens], true);
model.tokenization.setSemanticTokens([initialState.tokens], true);
model.applyEdits(edits);
@ -174,7 +174,7 @@ suite('TokensStore', () => {
test('issue #91936: Semantic token color highlighting fails on line with selected text', () => {
const model = createTextModel(' else if ($s = 08) then \'\\b\'');
model.setSemanticTokens([
model.tokenization.setSemanticTokens([
SparseMultilineTokens.create(1, new Uint32Array([
0, 20, 24, 0b0111100000000010000,
0, 25, 27, 0b0111100000000010000,
@ -187,7 +187,7 @@ suite('TokensStore', () => {
0, 43, 47, 0b0101100000000010000,
]))
], true);
const lineTokens = model.getLineTokens(1);
const lineTokens = model.tokenization.getLineTokens(1);
let decodedTokens: number[] = [];
for (let i = 0, len = lineTokens.getCount(); i < len; i++) {
decodedTokens.push(lineTokens.getEndOffset(i), lineTokens.getMetadata(i));

View file

@ -68,8 +68,8 @@ export class MainThreadLanguages implements MainThreadLanguagesShape {
if (!model) {
return undefined;
}
model.tokenizeIfCheap(position.lineNumber);
const tokens = model.getLineTokens(position.lineNumber);
model.tokenization.tokenizeIfCheap(position.lineNumber);
const tokens = model.tokenization.getLineTokens(position.lineNumber);
const idx = tokens.findTokenIndexAtOffset(position.column - 1);
return {
type: tokens.getStandardTokenType(idx),

View file

@ -227,14 +227,14 @@ export class BulkEditDataSource implements IAsyncDataSource<BulkFileOperations,
const range = Range.lift(edit.textEdit.textEdit.range);
//prefix-math
let startTokens = textModel.getLineTokens(range.startLineNumber);
let startTokens = textModel.tokenization.getLineTokens(range.startLineNumber);
let prefixLen = 23; // default value for the no tokens/grammar case
for (let idx = startTokens.findTokenIndexAtOffset(range.startColumn) - 1; prefixLen < 50 && idx >= 0; idx--) {
prefixLen = range.startColumn - startTokens.getStartOffset(idx);
}
//suffix-math
let endTokens = textModel.getLineTokens(range.endLineNumber);
let endTokens = textModel.tokenization.getLineTokens(range.endLineNumber);
let suffixLen = 0;
for (let idx = endTokens.findTokenIndexAtOffset(range.endColumn); suffixLen < 50 && idx < endTokens.getCount(); idx++) {
suffixLen += endTokens.getEndOffset(idx) - endTokens.getStartOffset(idx);

View file

@ -179,8 +179,8 @@ function getWordToLineNumbersMap(model: ITextModel | null): Map<string, number[]
continue;
}
model.forceTokenization(lineNumber);
const lineTokens = model.getLineTokens(lineNumber);
model.tokenization.forceTokenization(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
for (let tokenIndex = 0, tokenCount = lineTokens.getCount(); tokenIndex < tokenCount; tokenIndex++) {
const tokenType = lineTokens.getStandardTokenType(tokenIndex);

View file

@ -93,7 +93,7 @@ export abstract class EmmetEditorAction extends EditorAction {
}
const position = selection.getStartPosition();
model.tokenizeIfCheap(position.lineNumber);
model.tokenization.tokenizeIfCheap(position.lineNumber);
const languageId = model.getLanguageIdAtPosition(position.lineNumber, position.column);
const syntax = languageId.split('.').pop();

View file

@ -65,7 +65,7 @@ class EditorTextRenderer {
let result = '';
for (let lineNumber = startLineNumber; lineNumber <= endLineNumber; lineNumber++) {
const lineTokens = model.getLineTokens(lineNumber);
const lineTokens = model.tokenization.getLineTokens(lineNumber);
const lineContent = lineTokens.getLineContent();
const startOffset = (lineNumber === startLineNumber ? startColumn - 1 : 0);
const endOffset = (lineNumber === endLineNumber ? endColumn - 1 : lineContent.length);

View file

@ -112,7 +112,7 @@ class InsertSnippetAction extends EditorAction {
}
languageId = langId;
} else {
editor.getModel().tokenizeIfCheap(lineNumber);
editor.getModel().tokenization.tokenizeIfCheap(lineNumber);
languageId = editor.getModel().getLanguageIdAtPosition(lineNumber, column);
// validate the `languageId` to ensure this is a user

View file

@ -182,7 +182,7 @@ export class SnippetCompletionProvider implements CompletionItemProvider {
// validate the `languageId` to ensure this is a user
// facing language with a name and the chance to have
// snippets, else fall back to the outer language
model.tokenizeIfCheap(position.lineNumber);
model.tokenization.tokenizeIfCheap(position.lineNumber);
let languageId = model.getLanguageIdAtPosition(position.lineNumber, position.column);
if (!this._languageService.getLanguageName(languageId)) {
languageId = model.getLanguageId();

View file

@ -38,7 +38,7 @@ registerAction2(class SurroundWithAction extends EditorAction2 {
}
const { lineNumber, column } = editor.getPosition();
editor.getModel().tokenizeIfCheap(lineNumber);
editor.getModel().tokenization.tokenizeIfCheap(lineNumber);
const languageId = editor.getModel().getLanguageIdAtPosition(lineNumber, column);
const allSnippets = await snippetService.getSnippets(languageId, { includeNoPrefixSnippets: true, includeDisabledSnippets: true });

View file

@ -91,7 +91,7 @@ export class TabCompletionController implements IEditorContribution {
// lots of dance for getting the
const selection = this._editor.getSelection();
const model = this._editor.getModel();
model.tokenizeIfCheap(selection.positionLineNumber);
model.tokenization.tokenizeIfCheap(selection.positionLineNumber);
const id = model.getLanguageIdAtPosition(selection.positionLineNumber, selection.positionColumn);
const snippets = this._snippetService.getSnippetsSync(id);

View file

@ -903,7 +903,7 @@ export class DefaultSettingsEditorModel extends AbstractSettingsModel implements
// Force tokenization now - otherwise it may be slightly delayed, causing a flash of white text
const tokenizeTo = Math.min(startLine + 60, this._model.getLineCount());
this._model.forceTokenization(tokenizeTo);
this._model.tokenization.forceTokenization(tokenizeTo);
return { matches, settingsGroups };
}

View file

@ -120,7 +120,7 @@ class ModelWorkerTextMateTokenizer extends Disposable {
}
}
this._model.setTokens(tokens);
this._model.tokenization.setTokens(tokens);
}
}