recompile build folder with const over let

Author: Johannes, 2022-06-09 10:59:11 +02:00
Parent: 9792b8e561
Commit: 2c004b870f
Signature: no known key found for this signature in database (GPG key ID: 6DEF802A22264FCA)
18 changed files with 213 additions and 207 deletions
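
All of the hunks below touch checked-in compiled JavaScript under build/, and the change is mechanical: bindings that are never reassigned now compile to const instead of let, while bindings that are written more than once keep let (for example oldErrors and emitSourceMapsInStream in the builder hunk further down). The commit does not show which tool enforces the pattern; it matches ESLint's built-in prefer-const rule, and the snippet below is only an illustrative sketch with made-up names, not code from this commit.

// Illustration only - hypothetical example, not a file from this diff.
// `dirs` and `dir` are never reassigned, so `const` is the right declaration;
// `count` is reassigned inside the loop, so it has to stay `let`.
const dirs = ['build', 'remote'];
let count = 0;
for (const dir of dirs) {
    count += 1;
    console.log(`${count}: ${dir}`);
}
// With ESLint, the built-in 'prefer-const' rule reports the `let` cases that
// could be `const` (assumption: something like that drove the source change
// this recompile picks up; the commit itself does not show the tooling).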

View file

@ -14,7 +14,7 @@ shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `package.json` and `yarn.lock` files
for (let dir of dirs) {
for (const dir of dirs) {
const packageJsonPath = path.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {

View file

@ -19,7 +19,6 @@ function main() {
fileName: 'combined.nls.metadata.json',
jsonSpace: '',
edit: (parsedJson, file) => {
let key;
if (file.base === 'out-vscode-web-min') {
return { vscode: parsedJson };
}
@ -63,7 +62,7 @@ function main() {
break;
}
}
key = 'vscode.' + file.relative.split('/')[0];
const key = 'vscode.' + file.relative.split('/')[0];
return { [key]: parsedJson };
},
}))

View file

@ -39,7 +39,7 @@ async function main() {
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
const productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});

View file

@ -81,7 +81,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
let finish = () => {
const finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));

View file

@ -102,7 +102,7 @@ function getBuiltInExtensions() {
const control = readControlFile();
const streams = [];
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
const controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));
}

View file

@ -22,7 +22,7 @@ const watch = require('./watch');
const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
const options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@ -80,7 +80,7 @@ function compileTask(src, out, build) {
}
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
const generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
@ -96,7 +96,7 @@ function watchTask(out, build) {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
const generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
.pipe(generator.stream)
@ -112,7 +112,7 @@ class MonacoGenerator {
this._isWatch = isWatch;
this.stream = es.through();
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
const onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
return;
}
@ -149,7 +149,7 @@ class MonacoGenerator {
}, 20);
}
_run() {
let r = monacodts.run3(this._declarationResolver);
const r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);

View file

@ -41,7 +41,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
key = keyNode.value;
}
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
for (let property of keyNode.properties) {
for (const property of keyNode.properties) {
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
@ -83,7 +83,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
// (2)
// report all invalid NLS keys
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
for (let value of values) {
for (const value of values) {
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
}
}

View file

@ -16,7 +16,7 @@ module.exports = new class ApiProviderNaming {
return {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
let found = false;
for (let param of node.params) {
for (const param of node.params) {
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
found = found || param.name === 'token';
}

View file

@ -345,7 +345,7 @@ function scanBuiltinExtensions(extensionsRoot, exclude = []) {
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
if (!isWebExtension(packageJSON)) {
continue;
}
@ -373,7 +373,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj) => {
for (let key in obj) {
for (const key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);

View file

@ -45,7 +45,7 @@ function getVersion(repo) {
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch;
let refs = {};
const refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}

View file

@ -46,7 +46,7 @@ exports.externalExtensionsWithTranslations = {
var LocalizeInfo;
(function (LocalizeInfo) {
function is(value) {
let candidate = value;
const candidate = value;
return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
}
LocalizeInfo.is = is;
@ -57,8 +57,8 @@ var BundledFormat;
if (Is.undef(value)) {
return false;
}
let candidate = value;
let length = Object.keys(value).length;
const candidate = value;
const length = Object.keys(value).length;
return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
}
BundledFormat.is = is;
@ -70,7 +70,7 @@ var PackageJsonFormat;
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
const element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
@ -133,9 +133,9 @@ class XLF {
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set();
const existingKeys = new Set();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
const key = keys[i];
let realKey;
let comment;
if (Is.string(key)) {
@ -152,7 +152,7 @@ class XLF {
continue;
}
existingKeys.add(realKey);
let message = encodeEntities(messages[i]);
const message = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
@ -178,7 +178,7 @@ class XLF {
this.appendNewLine('</xliff>', 0);
}
appendNewLine(content, indent) {
let line = new Line(indent);
const line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
@ -186,8 +186,8 @@ class XLF {
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (_err, result) {
const fileNodes = result['xliff']['file'];
fileNodes.forEach(file => {
@ -211,8 +211,8 @@ XLF.parsePseudo = function (xlfString) {
};
XLF.parse = function (xlfString) {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
@ -226,7 +226,7 @@ XLF.parse = function (xlfString) {
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
const language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
@ -296,7 +296,7 @@ function stripComments(content) {
// Third group matches a multi line comment
// Forth group matches a single line comment
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
const result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches
if (m3) {
// A block comment. Replace with nothing
@ -356,20 +356,20 @@ function escapeCharacters(value) {
return result.join('');
}
function processCoreBundleFormat(fileHeader, languages, json, emitter) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
let statistics = Object.create(null);
let defaultMessages = Object.create(null);
let modules = Object.keys(keysSection);
const keysSection = json.keys;
const messageSection = json.messages;
const bundleSection = json.bundles;
const statistics = Object.create(null);
const defaultMessages = Object.create(null);
const modules = Object.keys(keysSection);
modules.forEach((module) => {
let keys = keysSection[module];
let messages = messageSection[module];
const keys = keysSection[module];
const messages = messageSection[module];
if (!messages || keys.length !== messages.length) {
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
return;
}
let messageMap = Object.create(null);
const messageMap = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
if (typeof key === 'string') {
@ -380,27 +380,27 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
const languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let sortedLanguages = sortLanguages(languages);
const sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`Generating nls bundles for: ${language.id}`);
}
statistics[language.id] = 0;
let localizedModules = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
const localizedModules = Object.create(null);
const languageFolderName = language.translationId || language.id;
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
const content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
modules.forEach((module) => {
let order = keysSection[module];
const order = keysSection[module];
let moduleMessage;
if (allMessages) {
moduleMessage = allMessages.contents[module];
@ -412,7 +412,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
}
let localizedMessages = [];
const localizedMessages = [];
order.forEach((keyInfo) => {
let key = null;
if (typeof keyInfo === 'string') {
@ -434,14 +434,14 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
localizedModules[module] = localizedMessages;
});
Object.keys(bundleSection).forEach((bundle) => {
let modules = bundleSection[bundle];
let contents = [
const modules = bundleSection[bundle];
const contents = [
fileHeader,
`define("${bundle}.nls.${language.id}", {`
];
modules.forEach((module, index) => {
contents.push(`\t"${module}": [`);
let messages = localizedModules[module];
const messages = localizedModules[module];
if (!messages) {
emitter.emit('error', `Didn't find messages for module ${module}.`);
return;
@ -456,11 +456,11 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
const value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
sortedLanguages.forEach(language => {
let stats = statistics[language.id];
const stats = statistics[language.id];
if (Is.undef(stats)) {
log(`\tNo translations found for language ${language.id}. Using default language instead.`);
}
@ -468,7 +468,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
function processNlsFiles(opts) {
return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path);
const fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
if (file.isBuffer()) {
@ -525,7 +525,7 @@ function createXlfFilesForCoreBundle() {
if (file.isBuffer()) {
const xlfs = Object.create(null);
const json = JSON.parse(file.contents.toString('utf8'));
for (let coreModule in json.keys) {
for (const coreModule in json.keys) {
const projectResource = getResource(coreModule);
const resource = projectResource.name;
const project = projectResource.project;
@ -544,7 +544,7 @@ function createXlfFilesForCoreBundle() {
xlf.addFile(`src/${coreModule}`, keys, messages);
}
}
for (let resource in xlfs) {
for (const resource in xlfs) {
const xlf = xlfs[resource];
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
@ -576,7 +576,7 @@ function createXlfFilesForExtensions() {
if (!stat.isDirectory()) {
return;
}
let extensionName = path.basename(extensionFolder.path);
const extensionName = path.basename(extensionFolder.path);
if (extensionName === 'node_modules') {
return;
}
@ -612,7 +612,7 @@ function createXlfFilesForExtensions() {
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
for (const file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
}
@ -624,7 +624,7 @@ function createXlfFilesForExtensions() {
}
}, function () {
if (_xlf) {
let xlfFile = new File({
const xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
contents: Buffer.from(_xlf.toString(), 'utf8')
});
@ -656,14 +656,14 @@ function createXlfFilesForIsl() {
else {
throw new Error(`Unknown input file ${file.path}`);
}
let xlf = new XLF(projectName), keys = [], messages = [];
let model = new TextModel(file.contents.toString());
const xlf = new XLF(projectName), keys = [], messages = [];
const model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
@ -675,13 +675,13 @@ function createXlfFilesForIsl() {
if (!inMessageSection) {
return;
}
let sections = line.split('=');
const sections = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
}
else {
let key = sections[0];
let value = sections[1];
const key = sections[0];
const value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
@ -698,8 +698,8 @@ function createXlfFilesForIsl() {
}
exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = [];
let updateCreatePromises = [];
const tryGetPromises = [];
const updateCreatePromises = [];
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
@ -737,11 +737,11 @@ function getAllResources(project, apiHostname, username, password) {
method: 'GET'
};
const request = https.request(options, (res) => {
let buffer = [];
const buffer = [];
res.on('data', (chunk) => buffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
let json = JSON.parse(Buffer.concat(buffer).toString());
const json = JSON.parse(Buffer.concat(buffer).toString());
if (Array.isArray(json)) {
resolve(json.map(o => o.slug));
return;
@ -760,7 +760,7 @@ function getAllResources(project, apiHostname, username, password) {
});
}
function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null);
const resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
@ -774,10 +774,10 @@ function findObsoleteResources(apiHostname, username, password) {
this.push(file);
}, function () {
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
const i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
const extractedResources = [];
for (const project of [workbenchProject, editorProject]) {
for (const resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
extractedResources.push(project + '/' + resource);
}
@ -787,11 +787,11 @@ function findObsoleteResources(apiHostname, username, password) {
console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
}
let promises = [];
for (let project in resourcesByProject) {
const promises = [];
for (const project in resourcesByProject) {
promises.push(getAllResources(project, apiHostname, username, password).then(resources => {
let expectedResources = resourcesByProject[project];
let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
const expectedResources = resourcesByProject[project];
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
if (unusedResources.length) {
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
}
@ -846,7 +846,7 @@ function createResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'POST'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
}
@ -878,7 +878,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'PUT'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
let responseBuffer = '';
@ -903,7 +903,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
});
}
function pullSetupXlfFiles(apiHostname, username, password, language, includeDefault) {
let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
if (includeDefault) {
setupResources.push({ name: 'setup_default', project: setupProject });
}
@ -912,7 +912,7 @@ function pullSetupXlfFiles(apiHostname, username, password, language, includeDef
exports.pullSetupXlfFiles = pullSetupXlfFiles;
function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
const expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved
@ -939,7 +939,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
return limiter.queue(() => new Promise((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@ -948,8 +948,8 @@ function retrieveResource(language, resource, apiHostname, credentials) {
method: 'GET'
};
console.log('[transifex] Fetching ' + options.path);
let request = https.request(options, (res) => {
let xlfBuffer = [];
const request = https.request(options, (res) => {
const xlfBuffer = [];
res.on('data', (chunk) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
@ -971,14 +971,14 @@ function retrieveResource(language, resource, apiHostname, credentials) {
}));
}
function prepareI18nFiles() {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createI18nFile(file.originalFilePath, file.messages);
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
stream.queue(translatedFile);
});
});
@ -990,7 +990,7 @@ function prepareI18nFiles() {
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
let result = Object.create(null);
const result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@ -998,7 +998,7 @@ function createI18nFile(originalFilePath, messages) {
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
for (const key of Object.keys(messages)) {
result[key] = messages[key];
}
let content = JSON.stringify(result, null, '\t');
@ -1012,16 +1012,16 @@ function createI18nFile(originalFilePath, messages) {
}
const i18nPackVersion = '1.0.0';
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
let parsePromises = [];
let mainPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
const parsePromises = [];
const mainPack = { version: i18nPackVersion, contents: {} };
const extensionsPacks = {};
const errors = [];
return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
@ -1057,7 +1057,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
for (const extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
@ -1077,14 +1077,14 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
}
exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
stream.queue(translatedFile);
});
}).catch(reason => {
@ -1100,7 +1100,7 @@ function prepareIslFiles(language, innoSetupConfig) {
}
exports.prepareIslFiles = prepareIslFiles;
function createIslFile(originalFilePath, messages, language, innoSetup) {
let content = [];
const content = [];
let originalContent;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
@ -1110,16 +1110,16 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
content.push(line);
}
else {
let sections = line.split('=');
let key = sections[0];
const sections = line.split('=');
const key = sections[0];
let translated = line;
if (key) {
let translatedMessage = messages[key];
const translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
@ -1137,9 +1137,9 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
});
}
function encodeEntities(value) {
let result = [];
const result = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
const ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');

View file

@ -27,7 +27,7 @@ function isDeclaration(ts, a) {
}
function visitTopLevelDeclarations(ts, sourceFile, visitor) {
let stop = false;
let visit = (node) => {
const visit = (node) => {
if (stop) {
return;
}
@ -49,19 +49,19 @@ function visitTopLevelDeclarations(ts, sourceFile, visitor) {
visit(sourceFile);
}
function getAllTopLevelDeclarations(ts, sourceFile) {
let all = [];
const all = [];
visitTopLevelDeclarations(ts, sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
const interfaceDeclaration = node;
const triviaStart = interfaceDeclaration.pos;
const triviaEnd = interfaceDeclaration.name.pos;
const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
let nodeText = getNodeText(sourceFile, node);
const nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
@ -95,7 +95,7 @@ function getNodeText(sourceFile, node) {
function hasModifier(modifiers, kind) {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
const mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
@ -113,14 +113,14 @@ function isDefaultExport(ts, declaration) {
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = declaration;
const interfaceDeclaration = declaration;
const staticTypeName = (isDefaultExport(ts, interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr = [];
const arr = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
@ -129,7 +129,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
const members = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
const memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
}
@ -152,7 +152,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
const lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
@ -177,9 +177,9 @@ function format(ts, text, endl) {
return text;
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
const edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function countParensCurly(text) {
@ -202,7 +202,7 @@ function format(ts, text, endl) {
return r;
}
function preformat(text, endl) {
let lines = text.split(endl);
const lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
@ -282,9 +282,9 @@ function format(ts, text, endl) {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
const change = edits[i];
const head = result.slice(0, change.span.start);
const tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
@ -300,15 +300,15 @@ function createReplacerFromDirectives(directives) {
}
function createReplacer(data) {
data = data || '';
let rawDirectives = data.split(';');
let directives = [];
const rawDirectives = data.split(';');
const directives = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
const pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
const replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
@ -317,32 +317,32 @@ function createReplacer(data) {
}
function generateDeclarationFile(ts, recipe, sourceFileGetter) {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result = [];
const lines = recipe.split(endl);
const result = [];
let usageCounter = 0;
let usageImports = [];
let usage = [];
const usageImports = [];
const usage = [];
let failed = false;
usage.push(`var a: any;`);
usage.push(`var b: any;`);
const generateUsageImport = (moduleId) => {
let importName = 'm' + (++usageCounter);
const importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums = [];
const enums = [];
let version = null;
lines.forEach(line => {
if (failed) {
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
const m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -351,14 +351,14 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
const replacer = createReplacer(m1[2]);
const typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
const declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
@ -369,9 +369,9 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -380,10 +380,10 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap = {};
let typesToExcludeArr = [];
const replacer = createReplacer(m2[2]);
const typeNames = m2[3].split(/,/);
const typesToExcludeMap = {};
const typesToExcludeArr = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
@ -400,7 +400,7 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
}
else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
const nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
@ -605,7 +605,7 @@ class TypeScriptLanguageServiceHost {
}
}
function execute() {
let r = run3(new DeclarationResolver(new FSProvider()));
const r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}

View file

@ -10,7 +10,7 @@ const path = require("path");
const tss = require("./treeshaking");
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache = {};
const dirCache = {};
function writeFile(filePath, contents) {
function ensureDirs(dirPath) {
if (dirCache[dirPath]) {
@ -53,13 +53,13 @@ function extractEditor(options) {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
const result = tss.shake(options);
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied = {};
const copied = {};
const copyFile = (fileName) => {
if (copied[fileName]) {
return;
@ -72,7 +72,7 @@ function extractEditor(options) {
const writeOutputFile = (fileName, contents) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
@ -119,7 +119,7 @@ function createESMSourcesAndResources2(options) {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file) => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
const dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
}
@ -193,7 +193,7 @@ function createESMSourcesAndResources2(options) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
const result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
@ -215,7 +215,7 @@ function createESMSourcesAndResources2(options) {
}
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents) {
let lines = fileContents.split(/\r\n|\r|\n/);
const lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
@ -278,14 +278,14 @@ function transportCSS(module, enqueue, write) {
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
const newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
const encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}

View file

@ -32,7 +32,7 @@ function printDiagnostics(options, diagnostics) {
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
const location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `:${location.line + 1}:${location.character}`;
}
result += ` - ` + JSON.stringify(diag.messageText);
@ -150,7 +150,7 @@ function processLibFiles(ts, options) {
result[key] = sourceText;
// process dependencies and "recurse"
const info = ts.preProcessFile(sourceText);
for (let ref of info.libReferenceDirectives) {
for (const ref of info.libReferenceDirectives) {
stack.push(ref.fileName);
}
}
@ -503,7 +503,7 @@ function markNodes(ts, languageService, options) {
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (const heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
@ -551,7 +551,7 @@ function generateResult(ts, languageService, shakeLevel) {
if (!program) {
throw new Error('Could not get program from language service');
}
let result = {};
const result = {};
const writeFile = (filePath, contents) => {
result[filePath] = contents;
};
@ -567,7 +567,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
return;
}
let text = sourceFile.text;
const text = sourceFile.text;
let result = '';
function keep(node) {
result += text.substring(node.pos, node.end);
@ -597,7 +597,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
}
else {
let survivingImports = [];
const survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === 2 /* NodeColor.Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
@ -626,7 +626,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
if (ts.isExportDeclaration(node)) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
let survivingExports = [];
const survivingExports = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === 2 /* NodeColor.Black */) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
@ -647,8 +647,8 @@ function generateResult(ts, languageService, shakeLevel) {
// keep method
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
const pos = member.pos - node.pos;
const end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);

View file

@ -28,7 +28,14 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
log(colors.cyan(topic), message);
}
}
let host = new LanguageServiceHost(cmd, projectFile, _log), service = ts.createLanguageService(host, ts.createDocumentRegistry()), lastBuildVersion = Object.create(null), lastDtsHash = Object.create(null), userWantsDeclarations = cmd.options.declaration, oldErrors = Object.create(null), headUsed = process.memoryUsage().heapUsed, emitSourceMapsInStream = true;
const host = new LanguageServiceHost(cmd, projectFile, _log);
const service = ts.createLanguageService(host, ts.createDocumentRegistry());
const lastBuildVersion = Object.create(null);
const lastDtsHash = Object.create(null);
const userWantsDeclarations = cmd.options.declaration;
let oldErrors = Object.create(null);
let headUsed = process.memoryUsage().heapUsed;
let emitSourceMapsInStream = true;
// always emit declaration files
host.getCompilationSettings().declaration = true;
function file(file) {
@ -85,8 +92,8 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
process.nextTick(function () {
if (/\.d\.ts$/.test(fileName)) {
// if it's already a d.ts file just emit it signature
let snapshot = host.getScriptSnapshot(fileName);
let signature = crypto.createHash('md5')
const snapshot = host.getScriptSnapshot(fileName);
const signature = crypto.createHash('md5')
.update(snapshot.getText(0, snapshot.getLength()))
.digest('base64');
return resolve({
@ -95,10 +102,10 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
files: []
});
}
let output = service.getEmitOutput(fileName);
let files = [];
const output = service.getEmitOutput(fileName);
const files = [];
let signature;
for (let file of output.outputFiles) {
for (const file of output.outputFiles) {
if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) {
continue;
}
@ -111,19 +118,19 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
continue;
}
}
let vinyl = new Vinyl({
const vinyl = new Vinyl({
path: file.name,
contents: Buffer.from(file.text),
base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined
});
if (!emitSourceMapsInStream && /\.js$/.test(file.name)) {
let sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
if (sourcemapFile) {
let extname = path.extname(vinyl.relative);
let basename = path.basename(vinyl.relative, extname);
let dirname = path.dirname(vinyl.relative);
let tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
let sourceMap = JSON.parse(sourcemapFile.text);
const extname = path.extname(vinyl.relative);
const basename = path.basename(vinyl.relative, extname);
const dirname = path.dirname(vinyl.relative);
const tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
const sourceMap = JSON.parse(sourcemapFile.text);
sourceMap.sources[0] = tsname.replace(/\\/g, '/');
vinyl.sourceMap = sourceMap;
}
@ -138,15 +145,15 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
});
});
}
let newErrors = Object.create(null);
let t1 = Date.now();
let toBeEmitted = [];
let toBeCheckedSyntactically = [];
let toBeCheckedSemantically = [];
let filesWithChangedSignature = [];
let dependentFiles = [];
let newLastBuildVersion = new Map();
for (let fileName of host.getScriptFileNames()) {
const newErrors = Object.create(null);
const t1 = Date.now();
const toBeEmitted = [];
const toBeCheckedSyntactically = [];
const toBeCheckedSemantically = [];
const filesWithChangedSignature = [];
const dependentFiles = [];
const newLastBuildVersion = new Map();
for (const fileName of host.getScriptFileNames()) {
if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) {
toBeEmitted.push(fileName);
toBeCheckedSyntactically.push(fileName);
@ -154,8 +161,8 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
}
return new Promise(resolve => {
let semanticCheckInfo = new Map();
let seenAsDependentFile = new Set();
const semanticCheckInfo = new Map();
const seenAsDependentFile = new Set();
function workOnNext() {
let promise;
// let fileName: string;
@ -168,9 +175,9 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
// (1st) emit code
else if (toBeEmitted.length) {
let fileName = toBeEmitted.pop();
const fileName = toBeEmitted.pop();
promise = emitSoon(fileName).then(value => {
for (let file of value.files) {
for (const file of value.files) {
_log('[emit code]', file.path);
out(file);
}
@ -189,7 +196,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
// (2nd) check syntax
else if (toBeCheckedSyntactically.length) {
let fileName = toBeCheckedSyntactically.pop();
const fileName = toBeCheckedSyntactically.pop();
_log('[check syntax]', fileName);
promise = checkSyntaxSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
@ -224,7 +231,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
// (4th) check dependents
else if (filesWithChangedSignature.length) {
while (filesWithChangedSignature.length) {
let fileName = filesWithChangedSignature.pop();
const fileName = filesWithChangedSignature.pop();
if (!isExternalModule(service.getProgram().getSourceFile(fileName))) {
_log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet');
toBeCheckedSemantically.push(...host.getScriptFileNames());
@ -243,7 +250,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
if (fileName) {
seenAsDependentFile.add(fileName);
let value = semanticCheckInfo.get(fileName);
const value = semanticCheckInfo.get(fileName);
if (value === 0) {
// already validated successfully -> look at dependents next
host.collectDependents(fileName, dependentFiles);
@ -400,7 +407,7 @@ class LanguageServiceHost {
}
if (!old || old.getVersion() !== snapshot.getVersion()) {
this._dependenciesRecomputeList.push(filename);
let node = this._dependencies.lookup(filename);
const node = this._dependencies.lookup(filename);
if (node) {
node.outgoing = Object.create(null);
}
@ -479,7 +486,7 @@ class LanguageServiceHost {
}
}
if (!found) {
for (let key in this._fileNameToDeclaredModule) {
for (const key in this._fileNameToDeclaredModule) {
if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) {
this._dependencies.inertEdge(filename, key);
}

View file

@ -60,7 +60,7 @@ function create(projectPath, existingOptions, verbose = false, onError = _defaul
const result = (token) => createStream(token);
result.src = (opts) => {
let _pos = 0;
let _fileNames = cmdLine.fileNames.slice(0);
const _fileNames = cmdLine.fileNames.slice(0);
return new class extends stream_1.Readable {
constructor() {
super({ objectMode: true });

View file

@ -30,7 +30,7 @@ var collections;
}
collections.lookupOrInsert = lookupOrInsert;
function forEach(collection, callback) {
for (let key in collection) {
for (const key in collection) {
if (hasOwnProperty.call(collection, key)) {
callback({
key: key,

View file

@ -240,7 +240,7 @@ function _rreaddir(dirPath, prepend, result) {
}
}
function rreddir(dirPath) {
let result = [];
const result = [];
_rreaddir(dirPath, '', result);
return result;
}
@ -344,7 +344,7 @@ function createExternalLoaderConfig(webEndpoint, commit, quality) {
return undefined;
}
webEndpoint = webEndpoint + `/${quality}/${commit}`;
let nodePaths = acquireWebNodePaths();
const nodePaths = acquireWebNodePaths();
Object.keys(nodePaths).map(function (key, _) {
nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
});