Handle invalid token type/modifier indexes. Fixes #96540

Martin Aeschlimann 2020-05-01 16:48:19 +02:00
parent ddff80b3e6
commit fe5024cb35
4 changed files with 83 additions and 51 deletions
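
For context (not part of the diff): a semantic tokens provider encodes every token as an index into legend.tokenTypes plus a bit set over legend.tokenModifiers, and nothing forces those values to stay inside the legend. That mismatch is what #96540 reports. The standalone TypeScript sketch below uses illustrative names (SemanticTokensLegend and describeToken are not VS Code APIs) to show how such a token decodes, and how an out-of-range type index or leftover modifier bits surface as "not in legend", which is the check the changed files add.

// Standalone sketch, not VS Code source. Names are illustrative.
interface SemanticTokensLegend {
    tokenTypes: string[];      // a token's type index points into this array
    tokenModifiers: string[];  // a token's modifier set is a bit set over this array
}

const legend: SemanticTokensLegend = {
    tokenTypes: ['class', 'function', 'variable'],
    tokenModifiers: ['declaration', 'static']
};

// Decode one token's (typeIdx, modSet) pair against the legend.
function describeToken(typeIdx: number, modSet: number): string {
    // An index past the end of tokenTypes yields undefined: the invalid case.
    const type = legend.tokenTypes[typeIdx] || 'not-in-legend';
    const modifiers: string[] = [];
    let modifierSet = modSet;
    for (let bit = 0; modifierSet > 0 && bit < legend.tokenModifiers.length; bit++) {
        if (modifierSet & 1) {
            modifiers.push(legend.tokenModifiers[bit]);
        }
        modifierSet = modifierSet >> 1;
    }
    // Bits that survive the loop refer to modifiers the legend does not define.
    if (modifierSet > 0) {
        modifiers.push('not-in-legend');
    }
    return `${type} [${modifiers.join(' ')}]`;
}

console.log(describeToken(1, 0b01));  // function [declaration]
console.log(describeToken(7, 0b100)); // not-in-legend [not-in-legend]

The renderer-side hunks below apply this kind of check before styling or inspecting a token.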


@@ -22,24 +22,31 @@ export function activate(context: vscode.ExtensionContext): any {
     function addToken(value: string, startLine: number, startCharacter: number, length: number) {
         const [type, ...modifiers] = value.split('.');
+        const selectedModifiers = [];
         let tokenType = legend.tokenTypes.indexOf(type);
         if (tokenType === -1) {
-            return;
-        }
-        let tokenModifiers = 0;
-        for (let i = 0; i < modifiers.length; i++) {
-            const index = legend.tokenModifiers.indexOf(modifiers[i]);
-            if (index !== -1) {
-                tokenModifiers = tokenModifiers | 1 << index;
+            if (type === 'notInLegend') {
+                tokenType = tokenTypes.length + 2;
+            } else {
+                return;
+            }
+        }
+        let tokenModifiers = 0;
+        for (const modifier of modifiers) {
+            const index = legend.tokenModifiers.indexOf(modifier);
+            if (index !== -1) {
+                tokenModifiers = tokenModifiers | 1 << index;
+                selectedModifiers.push(modifier);
+            } else if (modifier === 'notInLegend') {
+                tokenModifiers = tokenModifiers | 1 << (legend.tokenModifiers.length + 2);
+                selectedModifiers.push(modifier);
             }
         }
         builder.push(startLine, startCharacter, length, tokenType, tokenModifiers);
-        const selectedModifiers = legend.tokenModifiers.filter((_val, bit) => tokenModifiers & (1 << bit)).join(' ');
-        outputChannel.appendLine(`line: ${startLine}, character: ${startCharacter}, length ${length}, ${legend.tokenTypes[tokenType]} (${tokenType}), ${selectedModifiers} ${tokenModifiers.toString(2)}`);
+        outputChannel.appendLine(`line: ${startLine}, character: ${startCharacter}, length ${length}, ${type} (${tokenType}), ${selectedModifiers} ${tokenModifiers.toString(2)}`);
     }
     outputChannel.appendLine('---');
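
The hunk above lets test fixtures deliberately produce such out-of-legend values: a type named notInLegend is pushed with an index past the end of the legend, and a modifier named notInLegend sets a bit beyond legend.tokenModifiers. A standalone sketch of that bit encoding (tokenModifiersLegend and encodeModifiers are illustrative names, not the test's own variables):

// Standalone sketch of the modifier bit encoding used above. Illustrative names.
const tokenModifiersLegend = ['declaration', 'static', 'async'];

function encodeModifiers(modifiers: string[]): number {
    let bits = 0;
    for (const modifier of modifiers) {
        const index = tokenModifiersLegend.indexOf(modifier);
        if (index !== -1) {
            bits = bits | 1 << index;                             // a bit the legend defines
        } else if (modifier === 'notInLegend') {
            bits = bits | 1 << (tokenModifiersLegend.length + 2); // a bit outside the legend, on purpose
        }
    }
    return bits;
}

console.log(encodeModifiers(['declaration', 'async']).toString(2)); // 101
console.log(encodeModifiers(['notInLegend']).toString(2));          // 100000 (bit 5, but the legend has only 3 entries)

A decoder that only walks legend.tokenModifiers.length bits is left with a non-zero remainder for such a value, which is what the renderer-side changes below test for.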


@@ -4,6 +4,6 @@
     "variable.declaration", "parameterNames",
     "function.member.declaration",
     "interface.declaration",
-    "function.member.declaration", "testToken.testModifier"
+    "function.member.declaration", "function.notInLegend"
 ]


@@ -29,50 +29,65 @@ export class SemanticTokensProviderStyling {
         let metadata: number;
         if (entry) {
             metadata = entry.metadata;
-        } else {
-            const tokenType = this._legend.tokenTypes[tokenTypeIndex];
-            const tokenModifiers: string[] = [];
-            let modifierSet = tokenModifierSet;
-            for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
-                if (modifierSet & 1) {
-                    tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
-                }
-                modifierSet = modifierSet >> 1;
-            }
+            if (this._logService.getLevel() === LogLevel.Trace) {
+                this._logService.trace(`SemanticTokensProviderStyling [CACHED] ${tokenTypeIndex} / ${tokenModifierSet}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
+            }
+        } else {
+            let tokenType = this._legend.tokenTypes[tokenTypeIndex];
+            const tokenModifiers: string[] = [];
+            if (tokenType) {
+                let modifierSet = tokenModifierSet;
+                for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
+                    if (modifierSet & 1) {
+                        tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
+                    }
+                    modifierSet = modifierSet >> 1;
+                }
+                if (modifierSet > 0 && this._logService.getLevel() === LogLevel.Trace) {
+                    this._logService.trace(`SemanticTokensProviderStyling: unknown token modifier index: ${tokenModifierSet.toString(2)} for legend: ${JSON.stringify(this._legend.tokenModifiers)}`);
+                    tokenModifiers.push('not-in-legend');
+                }

-            const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
-            if (typeof tokenStyle === 'undefined') {
-                metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
-            } else {
-                metadata = 0;
-                if (typeof tokenStyle.italic !== 'undefined') {
-                    const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
-                    metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
-                }
-                if (typeof tokenStyle.bold !== 'undefined') {
-                    const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
-                    metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
-                }
-                if (typeof tokenStyle.underline !== 'undefined') {
-                    const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
-                    metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
-                }
-                if (tokenStyle.foreground) {
-                    const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
-                    metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
-                }
-                if (metadata === 0) {
-                    // Nothing!
-                    metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
-                }
-            }
+                const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
+                if (typeof tokenStyle === 'undefined') {
+                    metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
+                } else {
+                    metadata = 0;
+                    if (typeof tokenStyle.italic !== 'undefined') {
+                        const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
+                        metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
+                    }
+                    if (typeof tokenStyle.bold !== 'undefined') {
+                        const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
+                        metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
+                    }
+                    if (typeof tokenStyle.underline !== 'undefined') {
+                        const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
+                        metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
+                    }
+                    if (tokenStyle.foreground) {
+                        const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
+                        metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
+                    }
+                    if (metadata === 0) {
+                        // Nothing!
+                        metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
+                    }
+                }
+            } else {
+                if (this._logService.getLevel() === LogLevel.Trace) {
+                    this._logService.trace(`SemanticTokensProviderStyling: unknown token type index: ${tokenTypeIndex} for legend: ${JSON.stringify(this._legend.tokenTypes)}`);
+                }
+                metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
+                tokenType = 'not-in-legend';
+            }
             this._hashTable.add(tokenTypeIndex, tokenModifierSet, languageId.id, metadata);
+            if (this._logService.getLevel() === LogLevel.Trace) {
+                this._logService.trace(`SemanticTokensProviderStyling ${tokenTypeIndex} (${tokenType}) / ${tokenModifierSet} (${tokenModifiers.join(' ')}): foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
+            }
         }
-        if (this._logService.getLevel() === LogLevel.Trace) {
-            const type = this._legend.tokenTypes[tokenTypeIndex];
-            const modifiers = tokenModifierSet ? ' ' + this._legend.tokenModifiers.filter((_, i) => tokenModifierSet & (1 << i)).join(' ') : '';
-            this._logService.trace(`tokenStyleMetadata ${entry ? '[CACHED] ' : ''}${type}${modifiers}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
-        }
         return metadata;
     }
 }
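
In short, getMetadata above now resolves the type index before computing any styling: if the legend has no entry for it, the token falls back to SemanticTokensProviderStylingConstants.NO_STYLING and is trace-logged as not-in-legend, and leftover modifier bits are trace-logged the same way. The result is still cached per (type index, modifier set), so an invalid combination is only resolved once. Below is a minimal Map-based sketch of that cache-and-fallback flow, not the real implementation; the NO_STYLING value and resolveStyle are placeholders for the real constant and the theme's getTokenStyleMetadata lookup, and the real code packs font-style bits and a foreground color id into the metadata rather than one opaque number.

// Minimal sketch of the cache-and-fallback flow. Illustrative, not VS Code source.
const NO_STYLING = -1;                   // placeholder sentinel, not the real constant
const cache = new Map<string, number>(); // stands in for the commit's hash table

// Placeholder for the theme lookup; returns undefined when the theme has no rule.
function resolveStyle(tokenType: string): number | undefined {
    const fakeTheme: { [type: string]: number } = { function: 0b1010, class: 0b0110 };
    return fakeTheme[tokenType];
}

function getMetadata(tokenTypes: string[], typeIdx: number, modSet: number): number {
    const key = `${typeIdx}/${modSet}`;
    const cached = cache.get(key);
    if (cached !== undefined) {
        return cached;                   // hit: valid and invalid indexes are cached alike
    }
    const tokenType = tokenTypes[typeIdx];
    // Unknown type index, or no theme rule for it: fall back to "no semantic styling".
    const metadata = tokenType ? (resolveStyle(tokenType) ?? NO_STYLING) : NO_STYLING;
    cache.set(key, metadata);
    return metadata;
}

const types = ['class', 'function', 'variable'];
console.log(getMetadata(types, 1, 0)); // 10 (0b1010): styled
console.log(getMetadata(types, 9, 0)); // -1: index 9 is not in the legend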


@@ -514,8 +514,18 @@ class InspectEditorTokensWidget extends Disposable implements IContentWidget {
             const line = lastLine + lineDelta; // 0-based
             const character = lineDelta === 0 ? lastCharacter + charDelta : charDelta; // 0-based
             if (posLine === line && character <= posCharacter && posCharacter < character + len) {
-                const type = semanticTokens.legend.tokenTypes[typeIdx];
-                const modifiers = semanticTokens.legend.tokenModifiers.filter((_, k) => modSet & 1 << k);
+                const type = semanticTokens.legend.tokenTypes[typeIdx] || 'not in legend (ignored)';
+                const modifiers = [];
+                let modifierSet = modSet;
+                for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < semanticTokens.legend.tokenModifiers.length; modifierIndex++) {
+                    if (modifierSet & 1) {
+                        modifiers.push(semanticTokens.legend.tokenModifiers[modifierIndex]);
+                    }
+                    modifierSet = modifierSet >> 1;
+                }
+                if (modifierSet > 0) {
+                    modifiers.push('not in legend (ignored)');
+                }
                 const range = new Range(line + 1, character + 1, line + 1, character + 1 + len);
                 const definitions = {};
                 const colorMap = this._themeService.getColorTheme().tokenColorMap;