rename to semanticTokenTypes, semanticTokenModifiers & semanticTokenStyleDefaults

This commit is contained in:
Martin Aeschlimann 2020-01-30 15:57:35 +01:00
parent ab175a0f03
commit c88756957a
2 changed files with 33 additions and 33 deletions

View file

@@ -26,19 +26,19 @@
"vscode": "1.1.5"
},
"contributes": {
"tokenTypes": [
"semanticTokenTypes": [
{
"id": "testToken",
"description": "A test token"
}
],
"tokenModifiers": [
"semanticTokenModifiers": [
{
"id": "testModifier",
"description": "A test modifier"
}
],
"tokenStyleDefaults": [
"semanticTokenStyleDefaults": [
{
"selector": "testToken.testModifier",
"light": {

View file

@@ -41,22 +41,22 @@ const colorPattern = '^#([0-9A-Fa-f]{6})([0-9A-Fa-f]{2})?$';
const tokenClassificationRegistry: ITokenClassificationRegistry = getTokenClassificationRegistry();
const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenTypeExtensionPoint[]>({
extensionPoint: 'tokenTypes',
extensionPoint: 'semanticTokenTypes',
jsonSchema: {
description: nls.localize('contributes.tokenTypes', 'Contributes semantic token types.'),
description: nls.localize('contributes.semanticTokenTypes', 'Contributes semantic token types.'),
type: 'array',
items: {
type: 'object',
properties: {
id: {
type: 'string',
description: nls.localize('contributes.tokenTypes.id', 'The identifier of the token type'),
description: nls.localize('contributes.semanticTokenTypes.id', 'The identifier of the semantic token type'),
pattern: typeAndModifierIdPattern,
patternErrorMessage: nls.localize('contributes.tokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'),
patternErrorMessage: nls.localize('contributes.semanticTokenTypes.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*'),
},
description: {
type: 'string',
description: nls.localize('contributes.color.description', 'The description of the token type'),
description: nls.localize('contributes.color.description', 'The description of the semantic token type'),
}
}
}
@@ -64,21 +64,21 @@ const tokenTypeExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenTypeEx
});
const tokenModifierExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenModifierExtensionPoint[]>({
extensionPoint: 'tokenModifiers',
extensionPoint: 'semanticTokenModifiers',
jsonSchema: {
description: nls.localize('contributes.tokenModifiers', 'Contributes semantic token modifiers.'),
description: nls.localize('contributes.semanticTokenModifiers', 'Contributes semantic token modifiers.'),
type: 'array',
items: {
type: 'object',
properties: {
id: {
type: 'string',
description: nls.localize('contributes.tokenModifiers.id', 'The identifier of the token modifier'),
description: nls.localize('contributes.semanticTokenModifiers.id', 'The identifier of the semantic token modifier'),
pattern: typeAndModifierIdPattern,
patternErrorMessage: nls.localize('contributes.tokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')
patternErrorMessage: nls.localize('contributes.semanticTokenModifiers.id.format', 'Identifiers should be in the form letterOrDigit[_-letterOrDigit]*')
},
description: {
description: nls.localize('contributes.tokenModifiers.description', 'The description of the token modifier')
description: nls.localize('contributes.semanticTokenModifiers.description', 'The description of the semantic token modifier')
}
}
}
@@ -86,36 +86,36 @@ const tokenModifierExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenMo
});
const tokenStyleDefaultsExtPoint = ExtensionsRegistry.registerExtensionPoint<ITokenStyleDefaultExtensionPoint[]>({
extensionPoint: 'tokenStyleDefaults',
extensionPoint: 'semanticTokenStyleDefaults',
jsonSchema: {
description: nls.localize('contributes.tokenStyleDefaults', 'Contributes semantic token style default.'),
description: nls.localize('contributes.semanticTokenStyleDefaults', 'Contributes semantic token style defaults.'),
type: 'array',
items: {
type: 'object',
properties: {
selector: {
type: 'string',
description: nls.localize('contributes.tokenStyleDefaults.selector', 'The selector matching token types and modifiers.'),
description: nls.localize('contributes.semanticTokenStyleDefaults.selector', 'The selector matching token types and modifiers.'),
pattern: selectorPattern,
patternErrorMessage: nls.localize('contributes.tokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'),
patternErrorMessage: nls.localize('contributes.semanticTokenStyleDefaults.selector.format', 'Selectors should be in the form (type|*)(.modifier)*'),
},
scopes: {
type: 'array',
description: nls.localize('contributes.scopes.light', 'A list of textmate scopes that are matched against the current color theme to find a default style'),
description: nls.localize('contributes.scopes.light', 'A list of TextMate scopes that are matched against the current color theme to find a default style'),
items: {
type: 'string'
}
},
light: {
description: nls.localize('contributes.tokenStyleDefaults.light', 'The default style used for light themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.light', 'The default style used for light themes'),
$ref: textmateColorSettingsSchemaId
},
dark: {
description: nls.localize('contributes.tokenStyleDefaults.dark', 'The default style used for dark themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.dark', 'The default style used for dark themes'),
$ref: textmateColorSettingsSchemaId
},
highContrast: {
description: nls.localize('contributes.tokenStyleDefaults.hc', 'The default style used for high contrast themes'),
description: nls.localize('contributes.semanticTokenStyleDefaults.hc', 'The default style used for high contrast themes'),
$ref: textmateColorSettingsSchemaId
}
}
@@ -154,7 +154,7 @@ export class TokenClassificationExtensionPoints {
}
if (style.fontStyle) {
if (typeof style.fontStyle !== 'string' || !style.fontStyle.match(fontStylePattern)) {
collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be a one or a compination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint));
collector.error(nls.localize('invalid.fontStyle', "'configuration.{0}.fontStyle' must be one or a combination of \'italic\', \'bold\' or \'underline\' or the empty string", extensionPoint));
return undefined;
}
}
@@ -167,11 +167,11 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;
if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenTypeConfiguration', "'configuration.tokenType' must be an array"));
collector.error(nls.localize('invalid.semanticTokenTypeConfiguration', "'configuration.semanticTokenType' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (validateTypeOrModifier(contribution, 'tokenType', collector)) {
if (validateTypeOrModifier(contribution, 'semanticTokenType', collector)) {
tokenClassificationRegistry.registerTokenType(contribution.id, contribution.description);
}
}
@@ -189,11 +189,11 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;
if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenModifierConfiguration', "'configuration.tokenModifier' must be an array"));
collector.error(nls.localize('invalid.semanticTokenModifierConfiguration', "'configuration.semanticTokenModifier' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (validateTypeOrModifier(contribution, 'tokenModifier', collector)) {
if (validateTypeOrModifier(contribution, 'semanticTokenModifier', collector)) {
tokenClassificationRegistry.registerTokenModifier(contribution.id, contribution.description);
}
}
@@ -211,16 +211,16 @@ export class TokenClassificationExtensionPoints {
const collector = extension.collector;
if (!extensionValue || !Array.isArray(extensionValue)) {
collector.error(nls.localize('invalid.tokenStyleDefaultConfiguration', "'configuration.tokenStyleDefaults' must be an array"));
collector.error(nls.localize('invalid.semanticTokenStyleDefaultConfiguration', "'configuration.semanticTokenStyleDefaults' must be an array"));
return;
}
for (const contribution of extensionValue) {
if (typeof contribution.selector !== 'string' || contribution.selector.length === 0) {
collector.error(nls.localize('invalid.selector', "'configuration.tokenStyleDefaults.selector' must be defined and can not be empty"));
collector.error(nls.localize('invalid.selector', "'configuration.semanticTokenStyleDefaults.selector' must be defined and can not be empty"));
continue;
}
if (!contribution.selector.match(selectorPattern)) {
collector.error(nls.localize('invalid.selector.format', "'configuration.tokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*"));
collector.error(nls.localize('invalid.selector.format', "'configuration.semanticTokenStyleDefaults.selector' must be in the form (type|*)(.modifier)*"));
continue;
}
@@ -228,14 +228,14 @@ export class TokenClassificationExtensionPoints {
if (contribution.scopes) {
if ((!Array.isArray(contribution.scopes) || contribution.scopes.some(s => typeof s !== 'string'))) {
collector.error(nls.localize('invalid.scopes', "If defined, 'configuration.tokenStyleDefaults.scopes' must be an array or strings"));
collector.error(nls.localize('invalid.scopes', "If defined, 'configuration.semanticTokenStyleDefaults.scopes' must be an array or strings"));
continue;
}
tokenStyleDefault.scopesToProbe = [contribution.scopes];
}
tokenStyleDefault.light = validateStyle(contribution.light, 'tokenStyleDefaults.light', collector);
tokenStyleDefault.dark = validateStyle(contribution.dark, 'tokenStyleDefaults.dark', collector);
tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'tokenStyleDefaults.highContrast', collector);
tokenStyleDefault.light = validateStyle(contribution.light, 'semanticTokenStyleDefaults.light', collector);
tokenStyleDefault.dark = validateStyle(contribution.dark, 'semanticTokenStyleDefaults.dark', collector);
tokenStyleDefault.hc = validateStyle(contribution.highContrast, 'semanticTokenStyleDefaults.highContrast', collector);
const [type, ...modifiers] = contribution.selector.split('.');
const classification = tokenClassificationRegistry.getTokenClassification(type, modifiers);