Merge remote-tracking branch 'origin' into electron-18.x.y

Author: deepak1556
Date: 2022-06-10 18:29:22 +09:00
Commit: c3077080ec
1091 changed files with 11805 additions and 10397 deletions

@ -14,6 +14,12 @@
"constructor-super": "warn",
"curly": "warn",
"eqeqeq": "warn",
"prefer-const": [
"warn",
{
"destructuring": "all"
}
],
"no-buffer-constructor": "warn",
"no-caller": "warn",
"no-case-declarations": "warn",

@ -21,5 +21,6 @@ jobs:
with:
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
slack_token: ${{ secrets.SLACK_TOKEN }}
slack_user_token: ${{ secrets.SLACK_USER_TOKEN }}
slack_bot_name: "VSCodeBot"
notification_channel: codereview

.vscode/launch.json vendored (7 changes)
@ -16,11 +16,7 @@
"request": "attach",
"restart": true,
"name": "Attach to Extension Host",
// set to a large number: if there is an issue we're debugging that keeps
// the extension host from coming up, or the renderer is paused/crashes
// before it happens, developers will get an annoying alert, e.g. #126826.
// This can be set to 0 in 1.59.
"timeout": 999999999,
"timeout": 0,
"port": 5870,
"outFiles": [
"${workspaceFolder}/out/**/*.js",
@ -244,6 +240,7 @@
"runtimeArgs": [
"--inspect=5875",
"--no-cached-data",
"--crash-reporter-directory=${workspaceFolder}/.profile-oss/crashes",
// for general runtime freezes: https://github.com/microsoft/vscode/issues/127861#issuecomment-904144910
"--disable-features=CalculateNativeWinOcclusion",
],

@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"May 2022\""
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2022\""
},
{
"kind": 1,

@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-dev repo:microsoft/vscode-unpkg repo:microsoft/vscode-references-view repo:microsoft/vscode-anycode repo:microsoft/vscode-hexeditor repo:microsoft/vscode-extension-telemetry repo:microsoft/vscode-livepreview repo:microsoft/vscode-remotehub repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remote-repositories-github repo:microsoft/monaco-editor repo:microsoft/vscode-vsce\n\n// current milestone name\n$milestone=milestone:\"May 2022\""
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-dev repo:microsoft/vscode-unpkg repo:microsoft/vscode-references-view repo:microsoft/vscode-anycode repo:microsoft/vscode-hexeditor repo:microsoft/vscode-extension-telemetry repo:microsoft/vscode-livepreview repo:microsoft/vscode-remotehub repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remote-repositories-github repo:microsoft/monaco-editor repo:microsoft/vscode-vsce\n\n// current milestone name\n$milestone=milestone:\"June 2022\""
},
{
"kind": 1,

@ -14,7 +14,7 @@ shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `package.json` and `yarn.lock` files
for (let dir of dirs) {
for (const dir of dirs) {
const packageJsonPath = path.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {

@ -19,7 +19,7 @@ shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `package.json` and `yarn.lock` files
for (let dir of dirs) {
for (const dir of dirs) {
const packageJsonPath = path.join(ROOT, dir, 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
const relevantPackageJsonSections = {

@ -1,4 +1,7 @@
steps:
- checkout: self
fetchDepth: 1
- task: NodeTool@0
inputs:
versionSpec: "16.x"
@ -8,39 +11,21 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
SecretsFilter: "ESRP-PKI,esrp-aad-username,esrp-aad-password"
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
git checkout FETCH_HEAD
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
displayName: Checkout override commit
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: Cache@2
inputs:
key: "buildNodeModules | $(Agent.OS) | $(VSCODE_ARCH) | build/yarn.lock"
path: build/node_modules
cacheHitVar: BUILD_NODE_MODULES_RESTORED
displayName: Restore build node_modules cache
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
retryCountOnTaskFailure: 3
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'), ne(variables.BUILD_NODE_MODULES_RESTORED, 'true'))
displayName: Switch to Terrapin packages
- script: |
@ -54,6 +39,7 @@ steps:
echo "Yarn failed $i, trying again..."
done
displayName: Install build dependencies
condition: and(succeeded(), ne(variables.BUILD_NODE_MODULES_RESTORED, 'true'))
- download: current
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive

@ -158,7 +158,6 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
# Setting hardened entitlements is a requirement for:
@ -216,7 +215,7 @@ steps:
compile-extension:vscode-notebook-tests \
compile-extension:vscode-test-resolver
displayName: Build integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- ${{ if eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true) }}:
- script: |
@ -319,7 +318,7 @@ steps:
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: failed()
condition: succeededOrFailed()
- task: PublishTestResults@2
displayName: Publish Tests Results

@ -387,7 +387,7 @@ steps:
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(failed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results

@ -335,14 +335,15 @@ stages:
BUILDSECMON_OPT_IN: true
jobs:
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
- job: macOSTest
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin-test.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
- ${{ if eq(parameters.VSCODE_STEP_ON_IT, false) }}:
- job: macOSTest
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin-test.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
- ${{ if eq(variables['VSCODE_CIBUILD'], false) }}:
- job: macOS

@ -19,7 +19,6 @@ function main() {
fileName: 'combined.nls.metadata.json',
jsonSpace: '',
edit: (parsedJson, file) => {
let key;
if (file.base === 'out-vscode-web-min') {
return { vscode: parsedJson };
}
@ -63,7 +62,7 @@ function main() {
break;
}
}
key = 'vscode.' + file.relative.split('/')[0];
const key = 'vscode.' + file.relative.split('/')[0];
return { [key]: parsedJson };
},
}))

@ -34,7 +34,6 @@ function main(): Promise<void> {
fileName: 'combined.nls.metadata.json',
jsonSpace: '',
edit: (parsedJson, file) => {
let key;
if (file.base === 'out-vscode-web-min') {
return { vscode: parsedJson };
}
@ -82,7 +81,7 @@ function main(): Promise<void> {
break;
}
}
key = 'vscode.' + file.relative.split('/')[0];
const key = 'vscode.' + file.relative.split('/')[0];
return { [key]: parsedJson };
},
}))
@ -113,4 +112,3 @@ main().catch(err => {
console.error(err);
process.exit(1);
});

@ -339,7 +339,7 @@ steps:
targetPath: .build\logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(failed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishTestResults@2
displayName: Publish Tests Results

@ -39,7 +39,7 @@ async function main() {
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
const productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});

@ -45,7 +45,7 @@ async function main() {
force: true
});
let productJson = await fs.readJson(productJsonPath);
const productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});

@ -17,14 +17,14 @@ const compilation = require('./lib/compilation');
const monacoapi = require('./lib/monaco-api');
const fs = require('fs');
let root = path.dirname(__dirname);
let sha1 = util.getVersion(root);
let semver = require('./monaco/package.json').version;
let headerVersion = semver + '(' + sha1 + ')';
const root = path.dirname(__dirname);
const sha1 = util.getVersion(root);
const semver = require('./monaco/package.json').version;
const headerVersion = semver + '(' + sha1 + ')';
// Build
let editorEntryPoints = [
const editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
@ -40,11 +40,11 @@ let editorEntryPoints = [
}
];
let editorResources = [
const editorResources = [
'out-editor-build/vs/base/browser/ui/codicons/**/*.ttf'
];
let BUNDLED_FILE_HEADER = [
const BUNDLED_FILE_HEADER = [
'/*!-----------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Version: ' + headerVersion,
@ -224,7 +224,7 @@ const appendJSToESMImportsTask = task.define('append-js-to-esm-imports', () => {
result.push(line);
continue;
}
let modifiedLine = (
const modifiedLine = (
line
.replace(/^import(.*)\'([^']+)\'/, `import$1'$2.js'`)
.replace(/^export \* from \'([^']+)\'/, `export * from '$1.js'`)
@ -239,10 +239,10 @@ const appendJSToESMImportsTask = task.define('append-js-to-esm-imports', () => {
* @param {string} contents
*/
function toExternalDTS(contents) {
let lines = contents.split(/\r\n|\r|\n/);
const lines = contents.split(/\r\n|\r|\n/);
let killNextCloseCurlyBrace = false;
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
const line = lines[i];
if (killNextCloseCurlyBrace) {
if ('}' === line) {
@ -316,7 +316,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
// package.json
gulp.src('build/monaco/package.json')
.pipe(es.through(function (data) {
let json = JSON.parse(data.contents.toString());
const json = JSON.parse(data.contents.toString());
json.private = false;
data.contents = Buffer.from(JSON.stringify(json, null, ' '));
this.emit('data', data);
@ -360,10 +360,10 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
return;
}
let relativePathToMap = path.relative(path.join(data.relative), path.join('min-maps', data.relative + '.map'));
const relativePathToMap = path.relative(path.join(data.relative), path.join('min-maps', data.relative + '.map'));
let strContents = data.contents.toString();
let newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
const newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/# sourceMappingURL=[^ ]+$/, newStr);
data.contents = Buffer.from(strContents);
@ -483,13 +483,13 @@ function createTscCompileTask(watch) {
cwd: path.join(__dirname, '..'),
// stdio: [null, 'pipe', 'inherit']
});
let errors = [];
let reporter = createReporter('monaco');
const errors = [];
const reporter = createReporter('monaco');
/** @type {NodeJS.ReadWriteStream | undefined} */
let report;
// eslint-disable-next-line no-control-regex
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
const magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
child.stdout.on('data', data => {
let str = String(data);
@ -502,12 +502,12 @@ function createTscCompileTask(watch) {
report.end();
} else if (str) {
let match = /(.*\(\d+,\d+\): )(.*: )(.*)/.exec(str);
const match = /(.*\(\d+,\d+\): )(.*: )(.*)/.exec(str);
if (match) {
// trying to massage the message so that it matches the gulp-tsb error messages
// e.g. src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.
let fullpath = path.join(root, match[1]);
let message = match[3];
const fullpath = path.join(root, match[1]);
const message = match[3];
reporter(fullpath + message);
} else {
reporter(str);

@ -91,7 +91,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
const index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
@ -110,7 +110,7 @@ const tasks = compilations.map(function (tsconfigFile) {
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
const compilation = tsb.create(absolutePath, overrideOptions, { verbose: false }, err => reporter(err.toString()));
const pipeline = function () {
const input = es.through();

@ -16,7 +16,7 @@ function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
const checkIncluded = (set1, set2) => {
for (let depName in set1) {
for (const depName in set1) {
const depVersion = set1[depName];
const rootDepVersion = set2[depName];
if (!rootDepVersion) {

@ -281,7 +281,7 @@ function packageTask(type, platform, arch, sourceFolderName, destinationFolderNa
].map(resource => gulp.src(resource, { base: '.' }).pipe(rename(resource)));
}
let all = es.merge(
const all = es.merge(
packageJsonStream,
productJsonStream,
license,

@ -122,9 +122,9 @@ gulp.task(core);
* @return {Object} A map of paths to checksums.
*/
function computeChecksums(out, filenames) {
let result = {};
const result = {};
filenames.forEach(function (filename) {
let fullPath = path.join(process.cwd(), out, filename);
const fullPath = path.join(process.cwd(), out, filename);
result[filename] = computeChecksum(fullPath);
});
return result;
@ -137,9 +137,9 @@ function computeChecksums(out, filenames) {
* @return {string} The checksum for `filename`.
*/
function computeChecksum(filename) {
let contents = fs.readFileSync(filename);
const contents = fs.readFileSync(filename);
let hash = crypto
const hash = crypto
.createHash('md5')
.update(contents)
.digest('base64')
@ -453,20 +453,20 @@ gulp.task(task.define(
gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
const includeDefault = !!innoSetupConfig[language.id].defaultInfo;
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`));
}));
});
gulp.task('vscode-translations-import', function () {
let options = minimist(process.argv.slice(2), {
const options = minimist(process.argv.slice(2), {
string: 'location',
default: {
location: '../vscode-translations-import'
}
});
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
let id = language.id;
const id = language.id;
return gulp.src(`${options.location}/${id}/vscode-setup/messages.xlf`)
.pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
.pipe(vfs.dest(`./build/win32/i18n`));

@ -208,7 +208,7 @@ function packageTask(sourceFolderName, destinationFolderName) {
gulp.src('resources/server/code-512.png', { base: 'resources/server' })
);
let all = es.merge(
const all = es.merge(
packageJsonStream,
license,
sources,
@ -218,7 +218,7 @@ function packageTask(sourceFolderName, destinationFolderName) {
pwaicons
);
let result = all
const result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());

@ -116,8 +116,8 @@ function hygiene(some, linting = true) {
})
.then(
(result) => {
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');
const original = result.src.replace(/\r\n/gm, '\n');
const formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error(

@ -81,7 +81,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
let finish = () => {
const finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));

@ -98,7 +98,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
}
}, function () {
let finish = () => {
const finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));

@ -102,7 +102,7 @@ function getBuiltInExtensions() {
const control = readControlFile();
const streams = [];
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
const controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));
}

@ -143,7 +143,7 @@ export function getBuiltInExtensions(): Promise<void> {
const streams: Stream[] = [];
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
const controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));

@ -22,7 +22,7 @@ const watch = require('./watch');
const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
const options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@ -42,7 +42,7 @@ function createCompile(src, build, emitError) {
if (!build) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false }, err => reporter(err));
function pipeline(token) {
const bom = require('gulp-bom');
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
@ -80,7 +80,7 @@ function compileTask(src, out, build) {
}
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
const generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
@ -96,7 +96,7 @@ function watchTask(out, build) {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
const generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
.pipe(generator.stream)
@ -112,7 +112,7 @@ class MonacoGenerator {
this._isWatch = isWatch;
this.stream = es.through();
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
const onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
return;
}
@ -149,7 +149,7 @@ class MonacoGenerator {
}, 20);
}
_run() {
let r = monacodts.run3(this._declarationResolver);
const r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);

@ -26,7 +26,7 @@ const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
const rootDir = path.join(__dirname, `../../${src}`);
let options: ts.CompilerOptions = {};
const options: ts.CompilerOptions = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@ -50,7 +50,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false }, err => reporter(err));
function pipeline(token?: util.ICancellationToken) {
const bom = require('gulp-bom') as typeof import('gulp-bom');
@ -96,7 +96,7 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
const generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
@ -116,7 +116,7 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
const generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
@ -140,7 +140,7 @@ class MonacoGenerator {
this._isWatch = isWatch;
this.stream = es.through();
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
const onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
@ -182,7 +182,7 @@ class MonacoGenerator {
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
const r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);

@ -41,7 +41,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
key = keyNode.value;
}
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
for (let property of keyNode.properties) {
for (const property of keyNode.properties) {
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
@ -83,7 +83,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
// (2)
// report all invalid NLS keys
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
for (let value of values) {
for (const value of values) {
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
}
}

@ -51,7 +51,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
key = keyNode.value;
} else if (keyNode.type === AST_NODE_TYPES.ObjectExpression) {
for (let property of keyNode.properties) {
for (const property of keyNode.properties) {
if (property.type === AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
@ -97,7 +97,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
// (2)
// report all invalid NLS keys
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
for (let value of values) {
for (const value of values) {
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
}
}

@ -16,7 +16,7 @@ module.exports = new class ApiProviderNaming {
return {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
let found = false;
for (let param of node.params) {
for (const param of node.params) {
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
found = found || param.name === 'token';
}

@ -20,7 +20,7 @@ export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node: any) => {
let found = false;
for (let param of (<TSESTree.TSMethodSignature>node).params) {
for (const param of (<TSESTree.TSMethodSignature>node).params) {
if (param.type === AST_NODE_TYPES.Identifier) {
found = found || param.name === 'token';
}

@ -345,7 +345,7 @@ function scanBuiltinExtensions(extensionsRoot, exclude = []) {
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
if (!isWebExtension(packageJSON)) {
continue;
}
@ -373,7 +373,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj) => {
for (let key in obj) {
for (const key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);

@ -430,7 +430,7 @@ export function scanBuiltinExtensions(extensionsRoot: string, exclude: string[]
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
if (!isWebExtension(packageJSON)) {
continue;
}
@ -461,7 +461,7 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
const CharCode_PC = '%'.charCodeAt(0);
const packageNls: NLSFormat = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj: any) => {
for (let key in obj) {
for (const key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);
@ -500,7 +500,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
function addConfig(configOrFn: webpack.Configuration | Function) {
let config;
if (typeof configOrFn === 'function') {
config = configOrFn({}, {});
config = (configOrFn as Function)({}, {});
webpackConfigs.push(config);
} else {
config = configOrFn;

@ -45,7 +45,7 @@ function getVersion(repo) {
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch;
let refs = {};
const refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}

@ -51,7 +51,7 @@ export function getVersion(repo: string): string | undefined {
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray | null;
let refs: { [ref: string]: string } = {};
const refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];

@ -46,7 +46,7 @@ exports.externalExtensionsWithTranslations = {
var LocalizeInfo;
(function (LocalizeInfo) {
function is(value) {
let candidate = value;
const candidate = value;
return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
}
LocalizeInfo.is = is;
@ -57,8 +57,8 @@ var BundledFormat;
if (Is.undef(value)) {
return false;
}
let candidate = value;
let length = Object.keys(value).length;
const candidate = value;
const length = Object.keys(value).length;
return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
}
BundledFormat.is = is;
@ -70,7 +70,7 @@ var PackageJsonFormat;
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
const element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
@ -133,9 +133,9 @@ class XLF {
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set();
const existingKeys = new Set();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
const key = keys[i];
let realKey;
let comment;
if (Is.string(key)) {
@ -152,7 +152,7 @@ class XLF {
continue;
}
existingKeys.add(realKey);
let message = encodeEntities(messages[i]);
const message = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
@ -178,7 +178,7 @@ class XLF {
this.appendNewLine('</xliff>', 0);
}
appendNewLine(content, indent) {
let line = new Line(indent);
const line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
@ -186,8 +186,8 @@ class XLF {
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (_err, result) {
const fileNodes = result['xliff']['file'];
fileNodes.forEach(file => {
@ -211,8 +211,8 @@ XLF.parsePseudo = function (xlfString) {
};
XLF.parse = function (xlfString) {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
@ -226,7 +226,7 @@ XLF.parse = function (xlfString) {
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
const language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
@ -296,7 +296,7 @@ function stripComments(content) {
// Third group matches a multi line comment
// Forth group matches a single line comment
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
const result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches
if (m3) {
// A block comment. Replace with nothing
@ -356,20 +356,20 @@ function escapeCharacters(value) {
return result.join('');
}
function processCoreBundleFormat(fileHeader, languages, json, emitter) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
let statistics = Object.create(null);
let defaultMessages = Object.create(null);
let modules = Object.keys(keysSection);
const keysSection = json.keys;
const messageSection = json.messages;
const bundleSection = json.bundles;
const statistics = Object.create(null);
const defaultMessages = Object.create(null);
const modules = Object.keys(keysSection);
modules.forEach((module) => {
let keys = keysSection[module];
let messages = messageSection[module];
const keys = keysSection[module];
const messages = messageSection[module];
if (!messages || keys.length !== messages.length) {
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
return;
}
let messageMap = Object.create(null);
const messageMap = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
if (typeof key === 'string') {
@ -380,27 +380,27 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
const languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let sortedLanguages = sortLanguages(languages);
const sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`Generating nls bundles for: ${language.id}`);
}
statistics[language.id] = 0;
let localizedModules = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
const localizedModules = Object.create(null);
const languageFolderName = language.translationId || language.id;
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
const content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
modules.forEach((module) => {
let order = keysSection[module];
const order = keysSection[module];
let moduleMessage;
if (allMessages) {
moduleMessage = allMessages.contents[module];
@ -412,7 +412,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
}
let localizedMessages = [];
const localizedMessages = [];
order.forEach((keyInfo) => {
let key = null;
if (typeof keyInfo === 'string') {
@ -434,14 +434,14 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
localizedModules[module] = localizedMessages;
});
Object.keys(bundleSection).forEach((bundle) => {
let modules = bundleSection[bundle];
let contents = [
const modules = bundleSection[bundle];
const contents = [
fileHeader,
`define("${bundle}.nls.${language.id}", {`
];
modules.forEach((module, index) => {
contents.push(`\t"${module}": [`);
let messages = localizedModules[module];
const messages = localizedModules[module];
if (!messages) {
emitter.emit('error', `Didn't find messages for module ${module}.`);
return;
@ -456,11 +456,11 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
const value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
sortedLanguages.forEach(language => {
let stats = statistics[language.id];
const stats = statistics[language.id];
if (Is.undef(stats)) {
log(`\tNo translations found for language ${language.id}. Using default language instead.`);
}
@ -468,7 +468,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
function processNlsFiles(opts) {
return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path);
const fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
if (file.isBuffer()) {
@ -525,7 +525,7 @@ function createXlfFilesForCoreBundle() {
if (file.isBuffer()) {
const xlfs = Object.create(null);
const json = JSON.parse(file.contents.toString('utf8'));
for (let coreModule in json.keys) {
for (const coreModule in json.keys) {
const projectResource = getResource(coreModule);
const resource = projectResource.name;
const project = projectResource.project;
@ -544,7 +544,7 @@ function createXlfFilesForCoreBundle() {
xlf.addFile(`src/${coreModule}`, keys, messages);
}
}
for (let resource in xlfs) {
for (const resource in xlfs) {
const xlf = xlfs[resource];
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
@ -576,7 +576,7 @@ function createXlfFilesForExtensions() {
if (!stat.isDirectory()) {
return;
}
let extensionName = path.basename(extensionFolder.path);
const extensionName = path.basename(extensionFolder.path);
if (extensionName === 'node_modules') {
return;
}
@ -612,7 +612,7 @@ function createXlfFilesForExtensions() {
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
for (const file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
}
@ -624,7 +624,7 @@ function createXlfFilesForExtensions() {
}
}, function () {
if (_xlf) {
let xlfFile = new File({
const xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
contents: Buffer.from(_xlf.toString(), 'utf8')
});
@ -656,14 +656,14 @@ function createXlfFilesForIsl() {
else {
throw new Error(`Unknown input file ${file.path}`);
}
let xlf = new XLF(projectName), keys = [], messages = [];
let model = new TextModel(file.contents.toString());
const xlf = new XLF(projectName), keys = [], messages = [];
const model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
@ -675,13 +675,13 @@ function createXlfFilesForIsl() {
if (!inMessageSection) {
return;
}
let sections = line.split('=');
const sections = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
}
else {
let key = sections[0];
let value = sections[1];
const key = sections[0];
const value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
@ -698,8 +698,8 @@ function createXlfFilesForIsl() {
}
exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = [];
let updateCreatePromises = [];
const tryGetPromises = [];
const updateCreatePromises = [];
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
@ -737,11 +737,11 @@ function getAllResources(project, apiHostname, username, password) {
method: 'GET'
};
const request = https.request(options, (res) => {
let buffer = [];
const buffer = [];
res.on('data', (chunk) => buffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
let json = JSON.parse(Buffer.concat(buffer).toString());
const json = JSON.parse(Buffer.concat(buffer).toString());
if (Array.isArray(json)) {
resolve(json.map(o => o.slug));
return;
@ -760,7 +760,7 @@ function getAllResources(project, apiHostname, username, password) {
});
}
function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null);
const resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
@ -774,10 +774,10 @@ function findObsoleteResources(apiHostname, username, password) {
this.push(file);
}, function () {
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
const i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
const extractedResources = [];
for (const project of [workbenchProject, editorProject]) {
for (const resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
extractedResources.push(project + '/' + resource);
}
@ -787,11 +787,11 @@ function findObsoleteResources(apiHostname, username, password) {
console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
}
let promises = [];
for (let project in resourcesByProject) {
const promises = [];
for (const project in resourcesByProject) {
promises.push(getAllResources(project, apiHostname, username, password).then(resources => {
let expectedResources = resourcesByProject[project];
let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
const expectedResources = resourcesByProject[project];
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
if (unusedResources.length) {
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
}
@ -846,7 +846,7 @@ function createResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'POST'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
}
@ -878,7 +878,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'PUT'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
let responseBuffer = '';
@ -903,7 +903,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
});
}
function pullSetupXlfFiles(apiHostname, username, password, language, includeDefault) {
let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
if (includeDefault) {
setupResources.push({ name: 'setup_default', project: setupProject });
}
@ -912,7 +912,7 @@ function pullSetupXlfFiles(apiHostname, username, password, language, includeDef
exports.pullSetupXlfFiles = pullSetupXlfFiles;
function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
const expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved
@ -939,7 +939,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
return limiter.queue(() => new Promise((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@ -948,8 +948,8 @@ function retrieveResource(language, resource, apiHostname, credentials) {
method: 'GET'
};
console.log('[transifex] Fetching ' + options.path);
let request = https.request(options, (res) => {
let xlfBuffer = [];
const request = https.request(options, (res) => {
const xlfBuffer = [];
res.on('data', (chunk) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
@ -971,14 +971,14 @@ function retrieveResource(language, resource, apiHostname, credentials) {
}));
}
function prepareI18nFiles() {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createI18nFile(file.originalFilePath, file.messages);
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
stream.queue(translatedFile);
});
});
@ -990,7 +990,7 @@ function prepareI18nFiles() {
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
let result = Object.create(null);
const result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@ -998,7 +998,7 @@ function createI18nFile(originalFilePath, messages) {
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
for (const key of Object.keys(messages)) {
result[key] = messages[key];
}
let content = JSON.stringify(result, null, '\t');
@ -1012,16 +1012,16 @@ function createI18nFile(originalFilePath, messages) {
}
const i18nPackVersion = '1.0.0';
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
let parsePromises = [];
let mainPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
const parsePromises = [];
const mainPack = { version: i18nPackVersion, contents: {} };
const extensionsPacks = {};
const errors = [];
return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
@ -1057,7 +1057,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
for (const extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
@ -1077,14 +1077,14 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
}
exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
stream.queue(translatedFile);
});
}).catch(reason => {
@ -1100,7 +1100,7 @@ function prepareIslFiles(language, innoSetupConfig) {
}
exports.prepareIslFiles = prepareIslFiles;
function createIslFile(originalFilePath, messages, language, innoSetup) {
let content = [];
const content = [];
let originalContent;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
@ -1110,16 +1110,16 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
content.push(line);
}
else {
let sections = line.split('=');
let key = sections[0];
const sections = line.split('=');
const key = sections[0];
let translated = line;
if (key) {
let translatedMessage = messages[key];
const translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
@ -1137,9 +1137,9 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
});
}
function encodeEntities(value) {
let result = [];
const result = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
const ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');

@ -87,7 +87,7 @@ interface LocalizeInfo {
module LocalizeInfo {
export function is(value: any): value is LocalizeInfo {
let candidate = value as LocalizeInfo;
const candidate = value as LocalizeInfo;
return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
}
}
@ -104,8 +104,8 @@ module BundledFormat {
return false;
}
let candidate = value as BundledFormat;
let length = Object.keys(value).length;
const candidate = value as BundledFormat;
const length = Object.keys(value).length;
return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
}
@ -126,7 +126,7 @@ module PackageJsonFormat {
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
const element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
@ -218,9 +218,9 @@ export class XLF {
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set<string>();
const existingKeys = new Set<string>();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
const key = keys[i];
let realKey: string | undefined;
let comment: string | undefined;
if (Is.string(key)) {
@ -236,7 +236,7 @@ export class XLF {
continue;
}
existingKeys.add(realKey);
let message: string = encodeEntities(messages[i]);
const message: string = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
@ -269,15 +269,15 @@ export class XLF {
}
private appendNewLine(content: string, indent?: number): void {
let line = new Line(indent);
const line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
const parser = new xml2js.Parser();
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
parser.parseString(xlfString, function (_err: any, result: any) {
const fileNodes: any[] = result['xliff']['file'];
fileNodes.forEach(file => {
@ -302,9 +302,9 @@ export class XLF {
static parse = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
const parser = new xml2js.Parser();
let files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
parser.parseString(xlfString, function (err: any, result: any) {
if (err) {
@ -321,7 +321,7 @@ export class XLF {
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
const language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
@ -413,7 +413,7 @@ function stripComments(content: string): string {
// Third group matches a multi line comment
// Forth group matches a single line comment
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1: string, _m2: string, m3: string, m4: string) => {
const result = content.replace(regexp, (match, _m1: string, _m2: string, m3: string, m4: string) => {
// Only one of m1, m2, m3, m4 matches
if (m3) {
// A block comment. Replace with nothing
@ -472,22 +472,22 @@ function escapeCharacters(value: string): string {
}
function processCoreBundleFormat(fileHeader: string, languages: Language[], json: BundledFormat, emitter: ThroughStream) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
const keysSection = json.keys;
const messageSection = json.messages;
const bundleSection = json.bundles;
let statistics: Map<number> = Object.create(null);
const statistics: Map<number> = Object.create(null);
let defaultMessages: Map<Map<string>> = Object.create(null);
let modules = Object.keys(keysSection);
const defaultMessages: Map<Map<string>> = Object.create(null);
const modules = Object.keys(keysSection);
modules.forEach((module) => {
let keys = keysSection[module];
let messages = messageSection[module];
const keys = keysSection[module];
const messages = messageSection[module];
if (!messages || keys.length !== messages.length) {
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
return;
}
let messageMap: Map<string> = Object.create(null);
const messageMap: Map<string> = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
if (typeof key === 'string') {
@ -498,28 +498,28 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
const languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let sortedLanguages = sortLanguages(languages);
const sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`Generating nls bundles for: ${language.id}`);
}
statistics[language.id] = 0;
let localizedModules: Map<string[]> = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
const localizedModules: Map<string[]> = Object.create(null);
const languageFolderName = language.translationId || language.id;
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages: I18nFormat | undefined;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
const content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
modules.forEach((module) => {
let order = keysSection[module];
const order = keysSection[module];
let moduleMessage: { [messageKey: string]: string } | undefined;
if (allMessages) {
moduleMessage = allMessages.contents[module];
@ -531,7 +531,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
}
let localizedMessages: string[] = [];
const localizedMessages: string[] = [];
order.forEach((keyInfo) => {
let key: string | null = null;
if (typeof keyInfo === 'string') {
@ -552,14 +552,14 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
localizedModules[module] = localizedMessages;
});
Object.keys(bundleSection).forEach((bundle) => {
let modules = bundleSection[bundle];
let contents: string[] = [
const modules = bundleSection[bundle];
const contents: string[] = [
fileHeader,
`define("${bundle}.nls.${language.id}", {`
];
modules.forEach((module, index) => {
contents.push(`\t"${module}": [`);
let messages = localizedModules[module];
const messages = localizedModules[module];
if (!messages) {
emitter.emit('error', `Didn't find messages for module ${module}.`);
return;
@ -574,11 +574,11 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
const value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
sortedLanguages.forEach(language => {
let stats = statistics[language.id];
const stats = statistics[language.id];
if (Is.undef(stats)) {
log(`\tNo translations found for language ${language.id}. Using default language instead.`);
}
@ -587,7 +587,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
export function processNlsFiles(opts: { fileHeader: string; languages: Language[] }): ThroughStream {
return through(function (this: ThroughStream, file: File) {
let fileName = path.basename(file.path);
const fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
if (file.isBuffer()) {
@ -643,7 +643,7 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
if (file.isBuffer()) {
const xlfs: Map<XLF> = Object.create(null);
const json: BundledFormat = JSON.parse((file.contents as Buffer).toString('utf8'));
for (let coreModule in json.keys) {
for (const coreModule in json.keys) {
const projectResource = getResource(coreModule);
const resource = projectResource.name;
const project = projectResource.project;
@ -662,7 +662,7 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
xlf.addFile(`src/${coreModule}`, keys, messages);
}
}
for (let resource in xlfs) {
for (const resource in xlfs) {
const xlf = xlfs[resource];
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
@ -692,7 +692,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
if (!stat.isDirectory()) {
return;
}
let extensionName = path.basename(extensionFolder.path);
const extensionName = path.basename(extensionFolder.path);
if (extensionName === 'node_modules') {
return;
}
@ -725,7 +725,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
for (const file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
}
@ -736,7 +736,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
}, function () {
if (_xlf) {
let xlfFile = new File({
const xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
contents: Buffer.from(_xlf.toString(), 'utf8')
});
@ -769,17 +769,17 @@ export function createXlfFilesForIsl(): ThroughStream {
throw new Error(`Unknown input file ${file.path}`);
}
let xlf = new XLF(projectName),
const xlf = new XLF(projectName),
keys: string[] = [],
messages: string[] = [];
let model = new TextModel(file.contents.toString());
const model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
@ -791,12 +791,12 @@ export function createXlfFilesForIsl(): ThroughStream {
if (!inMessageSection) {
return;
}
let sections: string[] = line.split('=');
const sections: string[] = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
} else {
let key = sections[0];
let value = sections[1];
const key = sections[0];
const value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
@ -815,8 +815,8 @@ export function createXlfFilesForIsl(): ThroughStream {
}
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises: Array<Promise<boolean>> = [];
let updateCreatePromises: Array<Promise<boolean>> = [];
const tryGetPromises: Array<Promise<boolean>> = [];
const updateCreatePromises: Array<Promise<boolean>> = [];
return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative);
@ -857,11 +857,11 @@ function getAllResources(project: string, apiHostname: string, username: string,
};
const request = https.request(options, (res) => {
let buffer: Buffer[] = [];
const buffer: Buffer[] = [];
res.on('data', (chunk: Buffer) => buffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
let json = JSON.parse(Buffer.concat(buffer).toString());
const json = JSON.parse(Buffer.concat(buffer).toString());
if (Array.isArray(json)) {
resolve(json.map(o => o.slug));
return;
@ -880,7 +880,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
}
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
let resourcesByProject: Map<string[]> = Object.create(null);
const resourcesByProject: Map<string[]> = Object.create(null);
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
return through(function (this: ThroughStream, file: File) {
@ -897,10 +897,10 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
}, function () {
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources: string[] = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
const i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
const extractedResources: string[] = [];
for (const project of [workbenchProject, editorProject]) {
for (const resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
extractedResources.push(project + '/' + resource);
}
@ -911,12 +911,12 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
}
let promises: Array<Promise<void>> = [];
for (let project in resourcesByProject) {
const promises: Array<Promise<void>> = [];
for (const project in resourcesByProject) {
promises.push(
getAllResources(project, apiHostname, username, password).then(resources => {
let expectedResources = resourcesByProject[project];
let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
const expectedResources = resourcesByProject[project];
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
if (unusedResources.length) {
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
}
@ -974,7 +974,7 @@ function createResource(project: string, slug: string, xlfFile: File, apiHostnam
method: 'POST'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
} else {
@ -1008,7 +1008,7 @@ function updateResource(project: string, slug: string, xlfFile: File, apiHostnam
method: 'PUT'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
@ -1035,7 +1035,7 @@ function updateResource(project: string, slug: string, xlfFile: File, apiHostnam
}
export function pullSetupXlfFiles(apiHostname: string, username: string, password: string, language: Language, includeDefault: boolean): NodeJS.ReadableStream {
let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
if (includeDefault) {
setupResources.push({ name: 'setup_default', project: setupProject });
}
@ -1044,7 +1044,7 @@ export function pullSetupXlfFiles(apiHostname: string, username: string, passwor
function pullXlfFiles(apiHostname: string, username: string, password: string, language: Language, resources: Resource[]): NodeJS.ReadableStream {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
const expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return readable(function (_count: any, callback: any) {
@ -1075,7 +1075,7 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@ -1085,8 +1085,8 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
};
console.log('[transifex] Fetching ' + options.path);
let request = https.request(options, (res) => {
let xlfBuffer: Buffer[] = [];
const request = https.request(options, (res) => {
const xlfBuffer: Buffer[] = [];
res.on('data', (chunk: Buffer) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
@ -1107,16 +1107,16 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
}
export function prepareI18nFiles(): ThroughStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
return through(function (this: ThroughStream, xlf: File) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createI18nFile(file.originalFilePath, file.messages);
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
stream.queue(translatedFile);
});
}
@ -1129,7 +1129,7 @@ export function prepareI18nFiles(): ThroughStream {
}
function createI18nFile(originalFilePath: string, messages: any): File {
let result = Object.create(null);
const result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@ -1137,7 +1137,7 @@ function createI18nFile(originalFilePath: string, messages: any): File {
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
for (const key of Object.keys(messages)) {
result[key] = messages[key];
}
@ -1166,16 +1166,16 @@ export interface TranslationPath {
}
export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingTranslationPaths: TranslationPath[], pseudo = false): NodeJS.ReadWriteStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks: Map<I18nPack> = {};
let errors: any[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
const mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
const extensionsPacks: Map<I18nPack> = {};
const errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
@ -1213,7 +1213,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
for (const extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
@ -1234,16 +1234,16 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
}
export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup): ThroughStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
return through(function (this: ThroughStream, xlf: File) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
stream.queue(translatedFile);
});
}
@ -1260,7 +1260,7 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
}
function createIslFile(originalFilePath: string, messages: Map<string>, language: Language, innoSetup: InnoSetup): File {
let content: string[] = [];
const content: string[] = [];
let originalContent: TextModel;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
@ -1269,15 +1269,15 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
content.push(line);
} else {
let sections: string[] = line.split('=');
let key = sections[0];
const sections: string[] = line.split('=');
const key = sections[0];
let translated = line;
if (key) {
let translatedMessage = messages[key];
const translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
@ -1299,9 +1299,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
}
function encodeEntities(value: string): string {
let result: string[] = [];
const result: string[] = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
const ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');
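For orientation, the bundle-writing loop in processCoreBundleFormat above produces one AMD nls bundle per language; a rough sketch of the file it emits after the license file header, with made-up bundle/module names and translations (the real content comes from the vscode-loc language packs):

define("vs/workbench/workbench.main.nls.de", {
	"vs/workbench/some/module": [
		"Erste übersetzte Nachricht",
		"Zweite übersetzte Nachricht"
	]
});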

View file

@ -27,7 +27,7 @@ function isDeclaration(ts, a) {
}
function visitTopLevelDeclarations(ts, sourceFile, visitor) {
let stop = false;
let visit = (node) => {
const visit = (node) => {
if (stop) {
return;
}
@ -49,19 +49,19 @@ function visitTopLevelDeclarations(ts, sourceFile, visitor) {
visit(sourceFile);
}
function getAllTopLevelDeclarations(ts, sourceFile) {
let all = [];
const all = [];
visitTopLevelDeclarations(ts, sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
const interfaceDeclaration = node;
const triviaStart = interfaceDeclaration.pos;
const triviaEnd = interfaceDeclaration.name.pos;
const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
let nodeText = getNodeText(sourceFile, node);
const nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
@ -95,7 +95,7 @@ function getNodeText(sourceFile, node) {
function hasModifier(modifiers, kind) {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
const mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
@ -113,14 +113,14 @@ function isDefaultExport(ts, declaration) {
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = declaration;
const interfaceDeclaration = declaration;
const staticTypeName = (isDefaultExport(ts, interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr = [];
const arr = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
@ -129,7 +129,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
const members = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
const memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
}
@ -152,7 +152,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
const lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
@ -177,9 +177,9 @@ function format(ts, text, endl) {
return text;
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
const edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function countParensCurly(text) {
@ -202,7 +202,7 @@ function format(ts, text, endl) {
return r;
}
function preformat(text, endl) {
let lines = text.split(endl);
const lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
@ -282,9 +282,9 @@ function format(ts, text, endl) {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
const change = edits[i];
const head = result.slice(0, change.span.start);
const tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
@ -300,15 +300,15 @@ function createReplacerFromDirectives(directives) {
}
function createReplacer(data) {
data = data || '';
let rawDirectives = data.split(';');
let directives = [];
const rawDirectives = data.split(';');
const directives = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
const pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
const replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
@ -317,32 +317,32 @@ function createReplacer(data) {
}
function generateDeclarationFile(ts, recipe, sourceFileGetter) {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result = [];
const lines = recipe.split(endl);
const result = [];
let usageCounter = 0;
let usageImports = [];
let usage = [];
const usageImports = [];
const usage = [];
let failed = false;
usage.push(`var a: any;`);
usage.push(`var b: any;`);
const generateUsageImport = (moduleId) => {
let importName = 'm' + (++usageCounter);
const importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums = [];
const enums = [];
let version = null;
lines.forEach(line => {
if (failed) {
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
const m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -351,14 +351,14 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
const replacer = createReplacer(m1[2]);
const typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
const declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
@ -369,9 +369,9 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -380,10 +380,10 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap = {};
let typesToExcludeArr = [];
const replacer = createReplacer(m2[2]);
const typeNames = m2[3].split(/,/);
const typesToExcludeMap = {};
const typesToExcludeArr = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
@ -400,7 +400,7 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
}
else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
const nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
@ -605,7 +605,7 @@ class TypeScriptLanguageServiceHost {
}
}
function execute() {
let r = run3(new DeclarationResolver(new FSProvider()));
const r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}

View file

@ -40,7 +40,7 @@ function isDeclaration(ts: typeof import('typescript'), a: TSTopLevelDeclare): a
function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
let stop = false;
let visit = (node: ts.Node): void => {
const visit = (node: ts.Node): void => {
if (stop) {
return;
}
@ -67,19 +67,19 @@ function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile:
function getAllTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
let all: TSTopLevelDeclare[] = [];
const all: TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(ts, sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
const interfaceDeclaration = <ts.InterfaceDeclaration>node;
const triviaStart = interfaceDeclaration.pos;
const triviaEnd = interfaceDeclaration.name.pos;
const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
} else {
let nodeText = getNodeText(sourceFile, node);
const nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
@ -118,7 +118,7 @@ function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number
function hasModifier(modifiers: ts.NodeArray<ts.Modifier> | undefined, kind: ts.SyntaxKind): boolean {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
const mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
@ -141,7 +141,7 @@ function isDefaultExport(ts: typeof import('typescript'), declaration: ts.Interf
function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
const interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
const staticTypeName = (
isDefaultExport(ts, interfaceDeclaration)
@ -152,7 +152,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr: string[] = [];
const arr: string[] = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
@ -162,7 +162,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
const members: ts.NodeArray<ts.ClassElement | ts.TypeElement> = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
const memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
} else {
@ -182,7 +182,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
const lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
@ -212,10 +212,10 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
const edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
@ -242,7 +242,7 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
}
function preformat(text: string, endl: string): string {
let lines = text.split(endl);
const lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
@ -328,9 +328,9 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
const change = edits[i];
const head = result.slice(0, change.span.start);
const tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
@ -348,15 +348,15 @@ function createReplacerFromDirectives(directives: [RegExp, string][]): (str: str
function createReplacer(data: string): (str: string) => string {
data = data || '';
let rawDirectives = data.split(';');
let directives: [RegExp, string][] = [];
const rawDirectives = data.split(';');
const directives: [RegExp, string][] = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
const pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
const replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
@ -380,12 +380,12 @@ interface IEnumEntry {
function generateDeclarationFile(ts: typeof import('typescript'), recipe: string, sourceFileGetter: SourceFileGetter): ITempResult | null {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result: string[] = [];
const lines = recipe.split(endl);
const result: string[] = [];
let usageCounter = 0;
let usageImports: string[] = [];
let usage: string[] = [];
const usageImports: string[] = [];
const usage: string[] = [];
let failed = false;
@ -393,12 +393,12 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
usage.push(`var b: any;`);
const generateUsageImport = (moduleId: string) => {
let importName = 'm' + (++usageCounter);
const importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums: IEnumEntry[] = [];
const enums: IEnumEntry[] = [];
let version: string | null = null;
lines.forEach(line => {
@ -407,14 +407,14 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
const m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -425,15 +425,15 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
const replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
const typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
const declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
@ -445,9 +445,9 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@ -458,11 +458,11 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
const replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap: { [typeName: string]: boolean } = {};
let typesToExcludeArr: string[] = [];
const typeNames = m2[3].split(/,/);
const typesToExcludeMap: { [typeName: string]: boolean } = {};
const typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
@ -479,7 +479,7 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
}
} else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
const nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
@ -732,7 +732,7 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
}
export function execute(): IMonacoDeclarationResult {
let r = run3(new DeclarationResolver(new FSProvider()));
const r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
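For readers unfamiliar with the monaco.d.ts recipe format parsed above, here is an illustrative directive and the capture groups the #include regex extracts from it; the module, rename rule and type name are invented for illustration, and the real recipe file is not part of this diff:

// Hypothetical recipe line, matched by the m1 regex in generateDeclarationFile above.
const line = '#include(vs/editor/common/model;Foo=>Bar):ITextModel';
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/)!;
// m1[1] === 'vs/editor/common/model'  -> moduleId handed to sourceFileGetter
// m1[2] === ';Foo=>Bar'               -> rename directives passed to createReplacer (split on ';' and '=>')
// m1[3] === 'ITextModel'              -> comma-separated type names to include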

View file

@ -10,7 +10,7 @@ const path = require("path");
const tss = require("./treeshaking");
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache = {};
const dirCache = {};
function writeFile(filePath, contents) {
function ensureDirs(dirPath) {
if (dirCache[dirPath]) {
@ -53,13 +53,13 @@ function extractEditor(options) {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
const result = tss.shake(options);
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied = {};
const copied = {};
const copyFile = (fileName) => {
if (copied[fileName]) {
return;
@ -72,7 +72,7 @@ function extractEditor(options) {
const writeOutputFile = (fileName, contents) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
@ -119,7 +119,7 @@ function createESMSourcesAndResources2(options) {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file) => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
const dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
}
@ -193,7 +193,7 @@ function createESMSourcesAndResources2(options) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
const result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
@ -215,7 +215,7 @@ function createESMSourcesAndResources2(options) {
}
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents) {
let lines = fileContents.split(/\r\n|\r|\n/);
const lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
@ -278,14 +278,14 @@ function transportCSS(module, enqueue, write) {
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
const newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
const encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}

View file

@ -10,7 +10,7 @@ import * as tss from './treeshaking';
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache: { [dir: string]: boolean } = {};
const dirCache: { [dir: string]: boolean } = {};
function writeFile(filePath: string, contents: Buffer | string): void {
function ensureDirs(dirPath: string): void {
@ -63,13 +63,13 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
});
}
let result = tss.shake(options);
for (let fileName in result) {
const result = tss.shake(options);
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied: { [fileName: string]: boolean } = {};
const copied: { [fileName: string]: boolean } = {};
const copyFile = (fileName: string) => {
if (copied[fileName]) {
return;
@ -82,7 +82,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
const writeOutputFile = (fileName: string, contents: string | Buffer) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
@ -142,7 +142,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file: string): string => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
const dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
}
@ -229,7 +229,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result: string[] = [];
const result: string[] = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
@ -253,7 +253,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents: string): string {
let lines = fileContents.split(/\r\n|\r|\n/);
const lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
@ -325,14 +325,14 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
const newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
const encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}

View file

@ -32,7 +32,7 @@ function printDiagnostics(options, diagnostics) {
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
const location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `:${location.line + 1}:${location.character}`;
}
result += ` - ` + JSON.stringify(diag.messageText);
@ -150,7 +150,7 @@ function processLibFiles(ts, options) {
result[key] = sourceText;
// process dependencies and "recurse"
const info = ts.preProcessFile(sourceText);
for (let ref of info.libReferenceDirectives) {
for (const ref of info.libReferenceDirectives) {
stack.push(ref.fileName);
}
}
@ -503,7 +503,7 @@ function markNodes(ts, languageService, options) {
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (const heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
@ -551,7 +551,7 @@ function generateResult(ts, languageService, shakeLevel) {
if (!program) {
throw new Error('Could not get program from language service');
}
let result = {};
const result = {};
const writeFile = (filePath, contents) => {
result[filePath] = contents;
};
@ -567,7 +567,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
return;
}
let text = sourceFile.text;
const text = sourceFile.text;
let result = '';
function keep(node) {
result += text.substring(node.pos, node.end);
@ -597,7 +597,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
}
else {
let survivingImports = [];
const survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === 2 /* NodeColor.Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
@ -626,7 +626,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
if (ts.isExportDeclaration(node)) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
let survivingExports = [];
const survivingExports = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === 2 /* NodeColor.Black */) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
@ -647,8 +647,8 @@ function generateResult(ts, languageService, shakeLevel) {
// keep method
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
const pos = member.pos - node.pos;
const end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);

View file

@ -73,7 +73,7 @@ function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArr
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
const location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `:${location.line + 1}:${location.character}`;
}
result += ` - ` + JSON.stringify(diag.messageText);
@ -216,7 +216,7 @@ function processLibFiles(ts: typeof import('typescript'), options: ITreeShakingO
// process dependencies and "recurse"
const info = ts.preProcessFile(sourceText);
for (let ref of info.libReferenceDirectives) {
for (const ref of info.libReferenceDirectives) {
stack.push(ref.fileName);
}
}
@ -629,7 +629,7 @@ function markNodes(ts: typeof import('typescript'), languageService: ts.Language
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (const heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
@ -682,7 +682,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
throw new Error('Could not get program from language service');
}
let result: ITreeShakingResult = {};
const result: ITreeShakingResult = {};
const writeFile = (filePath: string, contents: string): void => {
result[filePath] = contents;
};
@ -700,7 +700,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
return;
}
let text = sourceFile.text;
const text = sourceFile.text;
let result = '';
function keep(node: ts.Node): void {
@ -734,7 +734,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
return keep(node);
}
} else {
let survivingImports: string[] = [];
const survivingImports: string[] = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === NodeColor.Black) {
survivingImports.push(importNode.getFullText(sourceFile));
@ -762,7 +762,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
if (ts.isExportDeclaration(node)) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
let survivingExports: string[] = [];
const survivingExports: string[] = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === NodeColor.Black) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
@ -785,8 +785,8 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
const pos = member.pos - node.pos;
const end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
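A minimal sketch (with an invented class) of the pos/end splice used above to drop class members that were never marked black during tree shaking:

const classText = 'class Widget { used(): void {} unused(): void {} }';
const pos = classText.indexOf(' unused');       // member.pos - node.pos in the real code
const end = classText.indexOf('}', pos) + 1;    // member.end - node.pos in the real code
const toWrite = classText.substring(0, pos) + classText.substring(end);
// toWrite === 'class Widget { used(): void {} }'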

View file

@ -9,7 +9,6 @@ const fs_1 = require("fs");
const path = require("path");
const crypto = require("crypto");
const utils = require("./utils");
const log = require("fancy-log");
const colors = require("ansi-colors");
const ts = require("typescript");
const Vinyl = require("vinyl");
@ -23,12 +22,15 @@ function normalize(path) {
return path.replace(/\\/g, '/');
}
function createTypeScriptBuilder(config, projectFile, cmd) {
function _log(topic, message) {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
let host = new LanguageServiceHost(cmd, projectFile, _log), service = ts.createLanguageService(host, ts.createDocumentRegistry()), lastBuildVersion = Object.create(null), lastDtsHash = Object.create(null), userWantsDeclarations = cmd.options.declaration, oldErrors = Object.create(null), headUsed = process.memoryUsage().heapUsed, emitSourceMapsInStream = true;
const _log = config.logFn;
const host = new LanguageServiceHost(cmd, projectFile, _log);
const service = ts.createLanguageService(host, ts.createDocumentRegistry());
const lastBuildVersion = Object.create(null);
const lastDtsHash = Object.create(null);
const userWantsDeclarations = cmd.options.declaration;
let oldErrors = Object.create(null);
let headUsed = process.memoryUsage().heapUsed;
let emitSourceMapsInStream = true;
// always emit declaration files
host.getCompilationSettings().declaration = true;
function file(file) {
@ -85,8 +87,8 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
process.nextTick(function () {
if (/\.d\.ts$/.test(fileName)) {
// if it's already a d.ts file just emit its signature
let snapshot = host.getScriptSnapshot(fileName);
let signature = crypto.createHash('md5')
const snapshot = host.getScriptSnapshot(fileName);
const signature = crypto.createHash('md5')
.update(snapshot.getText(0, snapshot.getLength()))
.digest('base64');
return resolve({
@ -95,10 +97,10 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
files: []
});
}
let output = service.getEmitOutput(fileName);
let files = [];
const output = service.getEmitOutput(fileName);
const files = [];
let signature;
for (let file of output.outputFiles) {
for (const file of output.outputFiles) {
if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) {
continue;
}
@ -111,19 +113,19 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
continue;
}
}
let vinyl = new Vinyl({
const vinyl = new Vinyl({
path: file.name,
contents: Buffer.from(file.text),
base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined
});
if (!emitSourceMapsInStream && /\.js$/.test(file.name)) {
let sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
if (sourcemapFile) {
let extname = path.extname(vinyl.relative);
let basename = path.basename(vinyl.relative, extname);
let dirname = path.dirname(vinyl.relative);
let tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
let sourceMap = JSON.parse(sourcemapFile.text);
const extname = path.extname(vinyl.relative);
const basename = path.basename(vinyl.relative, extname);
const dirname = path.dirname(vinyl.relative);
const tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
const sourceMap = JSON.parse(sourcemapFile.text);
sourceMap.sources[0] = tsname.replace(/\\/g, '/');
vinyl.sourceMap = sourceMap;
}
@ -138,15 +140,15 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
});
});
}
let newErrors = Object.create(null);
let t1 = Date.now();
let toBeEmitted = [];
let toBeCheckedSyntactically = [];
let toBeCheckedSemantically = [];
let filesWithChangedSignature = [];
let dependentFiles = [];
let newLastBuildVersion = new Map();
for (let fileName of host.getScriptFileNames()) {
const newErrors = Object.create(null);
const t1 = Date.now();
const toBeEmitted = [];
const toBeCheckedSyntactically = [];
const toBeCheckedSemantically = [];
const filesWithChangedSignature = [];
const dependentFiles = [];
const newLastBuildVersion = new Map();
for (const fileName of host.getScriptFileNames()) {
if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) {
toBeEmitted.push(fileName);
toBeCheckedSyntactically.push(fileName);
@ -154,8 +156,8 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
}
return new Promise(resolve => {
let semanticCheckInfo = new Map();
let seenAsDependentFile = new Set();
const semanticCheckInfo = new Map();
const seenAsDependentFile = new Set();
function workOnNext() {
let promise;
// let fileName: string;
@ -168,9 +170,9 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
// (1st) emit code
else if (toBeEmitted.length) {
let fileName = toBeEmitted.pop();
const fileName = toBeEmitted.pop();
promise = emitSoon(fileName).then(value => {
for (let file of value.files) {
for (const file of value.files) {
_log('[emit code]', file.path);
out(file);
}
@ -189,7 +191,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
// (2nd) check syntax
else if (toBeCheckedSyntactically.length) {
let fileName = toBeCheckedSyntactically.pop();
const fileName = toBeCheckedSyntactically.pop();
_log('[check syntax]', fileName);
promise = checkSyntaxSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
@ -224,7 +226,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
// (4th) check dependents
else if (filesWithChangedSignature.length) {
while (filesWithChangedSignature.length) {
let fileName = filesWithChangedSignature.pop();
const fileName = filesWithChangedSignature.pop();
if (!isExternalModule(service.getProgram().getSourceFile(fileName))) {
_log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet');
toBeCheckedSemantically.push(...host.getScriptFileNames());
@ -243,7 +245,7 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
}
if (fileName) {
seenAsDependentFile.add(fileName);
let value = semanticCheckInfo.get(fileName);
const value = semanticCheckInfo.get(fileName);
if (value === 0) {
// already validated successfully -> look at dependents next
host.collectDependents(fileName, dependentFiles);
@ -283,12 +285,10 @@ function createTypeScriptBuilder(config, projectFile, cmd) {
});
oldErrors = newErrors;
// print stats
if (config.verbose) {
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
log('[tsb]', 'time:', colors.yellow((Date.now() - t1) + 'ms'), 'mem:', colors.cyan(Math.ceil(headNow / MB) + 'MB'), colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB)));
headUsed = headNow;
}
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
_log('[tsb]', `time: ${colors.yellow((Date.now() - t1) + 'ms')} + \nmem: ${colors.cyan(Math.ceil(headNow / MB) + 'MB')} ${colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`);
headUsed = headNow;
});
}
return {
@ -400,7 +400,7 @@ class LanguageServiceHost {
}
if (!old || old.getVersion() !== snapshot.getVersion()) {
this._dependenciesRecomputeList.push(filename);
let node = this._dependencies.lookup(filename);
const node = this._dependencies.lookup(filename);
if (node) {
node.outgoing = Object.create(null);
}
@ -479,7 +479,7 @@ class LanguageServiceHost {
}
}
if (!found) {
for (let key in this._fileNameToDeclaredModule) {
for (const key in this._fileNameToDeclaredModule) {
if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) {
this._dependencies.inertEdge(filename, key);
}
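The builder now takes its logger from the configuration instead of checking a verbose flag and importing fancy-log itself; a minimal sketch of how a caller might wire this up (this gulpfile-side wiring is assumed, it is not part of the diff):

import * as colors from 'ansi-colors';
import * as log from 'fancy-log';

// Hypothetical caller: verbose logging is injected through IConfiguration.logFn,
// so a quiet build simply passes a no-op function.
const verbose = process.argv.includes('--verbose');
const config = {
	logFn: verbose
		? (topic: string, message: string) => log(colors.cyan(topic), message)
		: (_topic: string, _message: string) => { /* silent */ }
};
// config would then be passed to createTypeScriptBuilder(config, projectFile, cmd).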

View file

@ -7,13 +7,12 @@ import { statSync, readFileSync } from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
import * as utils from './utils';
import * as log from 'fancy-log';
import * as colors from 'ansi-colors';
import * as ts from 'typescript';
import * as Vinyl from 'vinyl';
export interface IConfiguration {
verbose: boolean;
logFn: (topic: string, message: string) => void;
_emitWithoutBasePath?: boolean;
}
@ -39,25 +38,20 @@ function normalize(path: string): string {
export function createTypeScriptBuilder(config: IConfiguration, projectFile: string, cmd: ts.ParsedCommandLine): ITypeScriptBuilder {
function _log(topic: string, message: string): void {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
const _log = config.logFn;
let host = new LanguageServiceHost(cmd, projectFile, _log),
service = ts.createLanguageService(host, ts.createDocumentRegistry()),
lastBuildVersion: { [path: string]: string } = Object.create(null),
lastDtsHash: { [path: string]: string } = Object.create(null),
userWantsDeclarations = cmd.options.declaration,
oldErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null),
headUsed = process.memoryUsage().heapUsed,
emitSourceMapsInStream = true;
const host = new LanguageServiceHost(cmd, projectFile, _log);
const service = ts.createLanguageService(host, ts.createDocumentRegistry());
const lastBuildVersion: { [path: string]: string } = Object.create(null);
const lastDtsHash: { [path: string]: string } = Object.create(null);
const userWantsDeclarations = cmd.options.declaration;
let oldErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null);
let headUsed = process.memoryUsage().heapUsed;
let emitSourceMapsInStream = true;
// always emit declaration files
host.getCompilationSettings().declaration = true;
function file(file: Vinyl): void {
// support gulp-sourcemaps
if ((<any>file).sourceMap) {
@ -117,8 +111,8 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
if (/\.d\.ts$/.test(fileName)) {
// if it's already a d.ts file just emit its signature
let snapshot = host.getScriptSnapshot(fileName);
let signature = crypto.createHash('md5')
const snapshot = host.getScriptSnapshot(fileName);
const signature = crypto.createHash('md5')
.update(snapshot.getText(0, snapshot.getLength()))
.digest('base64');
@ -129,11 +123,11 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
});
}
let output = service.getEmitOutput(fileName);
let files: Vinyl[] = [];
const output = service.getEmitOutput(fileName);
const files: Vinyl[] = [];
let signature: string | undefined;
for (let file of output.outputFiles) {
for (const file of output.outputFiles) {
if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) {
continue;
}
@ -149,22 +143,22 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
}
}
let vinyl = new Vinyl({
const vinyl = new Vinyl({
path: file.name,
contents: Buffer.from(file.text),
base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined
});
if (!emitSourceMapsInStream && /\.js$/.test(file.name)) {
let sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
if (sourcemapFile) {
let extname = path.extname(vinyl.relative);
let basename = path.basename(vinyl.relative, extname);
let dirname = path.dirname(vinyl.relative);
let tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
const extname = path.extname(vinyl.relative);
const basename = path.basename(vinyl.relative, extname);
const dirname = path.dirname(vinyl.relative);
const tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
let sourceMap = JSON.parse(sourcemapFile.text);
const sourceMap = JSON.parse(sourcemapFile.text);
sourceMap.sources[0] = tsname.replace(/\\/g, '/');
(<any>vinyl).sourceMap = sourceMap;
}
@ -182,17 +176,17 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
});
}
let newErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null);
let t1 = Date.now();
const newErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null);
const t1 = Date.now();
let toBeEmitted: string[] = [];
let toBeCheckedSyntactically: string[] = [];
let toBeCheckedSemantically: string[] = [];
let filesWithChangedSignature: string[] = [];
let dependentFiles: string[] = [];
let newLastBuildVersion = new Map<string, string>();
const toBeEmitted: string[] = [];
const toBeCheckedSyntactically: string[] = [];
const toBeCheckedSemantically: string[] = [];
const filesWithChangedSignature: string[] = [];
const dependentFiles: string[] = [];
const newLastBuildVersion = new Map<string, string>();
for (let fileName of host.getScriptFileNames()) {
for (const fileName of host.getScriptFileNames()) {
if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) {
toBeEmitted.push(fileName);
@ -203,8 +197,8 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
return new Promise<void>(resolve => {
let semanticCheckInfo = new Map<string, number>();
let seenAsDependentFile = new Set<string>();
const semanticCheckInfo = new Map<string, number>();
const seenAsDependentFile = new Set<string>();
function workOnNext() {
@ -221,10 +215,10 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
// (1st) emit code
else if (toBeEmitted.length) {
let fileName = toBeEmitted.pop()!;
const fileName = toBeEmitted.pop()!;
promise = emitSoon(fileName).then(value => {
for (let file of value.files) {
for (const file of value.files) {
_log('[emit code]', file.path);
out(file);
}
@ -246,7 +240,7 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
// (2nd) check syntax
else if (toBeCheckedSyntactically.length) {
let fileName = toBeCheckedSyntactically.pop()!;
const fileName = toBeCheckedSyntactically.pop()!;
_log('[check syntax]', fileName);
promise = checkSyntaxSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
@ -286,7 +280,7 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
// (4th) check dependents
else if (filesWithChangedSignature.length) {
while (filesWithChangedSignature.length) {
let fileName = filesWithChangedSignature.pop()!;
const fileName = filesWithChangedSignature.pop()!;
if (!isExternalModule(service.getProgram()!.getSourceFile(fileName)!)) {
_log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet');
@ -308,7 +302,7 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
}
if (fileName) {
seenAsDependentFile.add(fileName);
let value = semanticCheckInfo.get(fileName);
const value = semanticCheckInfo.get(fileName);
if (value === 0) {
// already validated successfully -> look at dependents next
host.collectDependents(fileName, dependentFiles);
@ -355,15 +349,13 @@ export function createTypeScriptBuilder(config: IConfiguration, projectFile: str
oldErrors = newErrors;
// print stats
if (config.verbose) {
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
log('[tsb]',
'time:', colors.yellow((Date.now() - t1) + 'ms'),
'mem:', colors.cyan(Math.ceil(headNow / MB) + 'MB'), colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))
);
headUsed = headNow;
}
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
_log(
'[tsb]',
`time: ${colors.yellow((Date.now() - t1) + 'ms')} + \nmem: ${colors.cyan(Math.ceil(headNow / MB) + 'MB')} ${colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`
);
headUsed = headNow;
});
}
@ -507,7 +499,7 @@ class LanguageServiceHost implements ts.LanguageServiceHost {
}
if (!old || old.getVersion() !== snapshot.getVersion()) {
this._dependenciesRecomputeList.push(filename);
let node = this._dependencies.lookup(filename);
const node = this._dependencies.lookup(filename);
if (node) {
node.outgoing = Object.create(null);
}
@ -605,7 +597,7 @@ class LanguageServiceHost implements ts.LanguageServiceHost {
}
if (!found) {
for (let key in this._fileNameToDeclaredModule) {
for (const key in this._fileNameToDeclaredModule) {
if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) {
this._dependencies.inertEdge(filename, key);
}

View file

@ -13,6 +13,8 @@ const stream_1 = require("stream");
const path_1 = require("path");
const utils_1 = require("./utils");
const fs_1 = require("fs");
const log = require("fancy-log");
const colors = require("ansi-colors");
class EmptyDuplex extends stream_1.Duplex {
_write(_chunk, _encoding, callback) { callback(); }
_read() { this.push(null); }
@ -23,7 +25,7 @@ function createNullCompiler() {
return result;
}
const _defaultOnError = (err) => console.log(JSON.stringify(err, null, 4));
function create(projectPath, existingOptions, verbose = false, onError = _defaultOnError) {
function create(projectPath, existingOptions, config, onError = _defaultOnError) {
function printDiagnostic(diag) {
if (!diag.file || !diag.start) {
onError(ts.flattenDiagnosticMessageText(diag.messageText, '\n'));
@ -43,8 +45,14 @@ function create(projectPath, existingOptions, verbose = false, onError = _defaul
cmdLine.errors.forEach(printDiagnostic);
return createNullCompiler();
}
const _builder = builder.createTypeScriptBuilder({ verbose }, projectPath, cmdLine);
function createStream(token) {
function logFn(topic, message) {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
// FULL COMPILE stream doing transpile, syntax and semantic diagnostics
function createCompileStream(token) {
const _builder = builder.createTypeScriptBuilder({ logFn }, projectPath, cmdLine);
return through(function (file) {
// give the file to the compiler
if (file.isStream()) {
@ -57,10 +65,41 @@ function create(projectPath, existingOptions, verbose = false, onError = _defaul
_builder.build(file => this.queue(file), printDiagnostic, token).catch(e => console.error(e)).then(() => this.queue(null));
});
}
const result = (token) => createStream(token);
// TRANSPILE ONLY stream doing just TS to JS conversion
function createTranspileStream() {
return through(function (file) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
if (!file.contents) {
return;
}
const out = ts.transpileModule(String(file.contents), {
compilerOptions: { ...cmdLine.options, declaration: false, sourceMap: false }
});
if (out.diagnostics) {
out.diagnostics.forEach(printDiagnostic);
}
const outFile = new Vinyl({
path: file.path.replace(/\.ts$/, '.js'),
cwd: file.cwd,
base: file.base,
contents: Buffer.from(out.outputText),
});
this.push(outFile);
logFn('Transpiled', file.path);
});
}
const result = (token) => {
return config.transplileOnly
? createTranspileStream()
: createCompileStream(token);
};
result.src = (opts) => {
let _pos = 0;
let _fileNames = cmdLine.fileNames.slice(0);
const _fileNames = cmdLine.fileNames.slice(0);
return new class extends stream_1.Readable {
constructor() {
super({ objectMode: true });

View file

@ -11,6 +11,8 @@ import { Readable, Writable, Duplex } from 'stream';
import { dirname } from 'path';
import { strings } from './utils';
import { readFileSync, statSync } from 'fs';
import * as log from 'fancy-log';
import colors = require('ansi-colors');
export interface IncrementalCompiler {
(token?: any): Readable & Writable;
@ -33,7 +35,7 @@ const _defaultOnError = (err: string) => console.log(JSON.stringify(err, null, 4
export function create(
projectPath: string,
existingOptions: Partial<ts.CompilerOptions>,
verbose: boolean = false,
config: { verbose?: boolean; transplileOnly?: boolean },
onError: (message: string) => void = _defaultOnError
): IncrementalCompiler {
@ -64,9 +66,16 @@ export function create(
return createNullCompiler();
}
const _builder = builder.createTypeScriptBuilder({ verbose }, projectPath, cmdLine);
function logFn(topic: string, message: string): void {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
function createStream(token?: builder.CancellationToken): Readable & Writable {
// FULL COMPILE stream doing transpile, syntax and semantic diagnostics
function createCompileStream(token?: builder.CancellationToken): Readable & Writable {
const _builder = builder.createTypeScriptBuilder({ logFn }, projectPath, cmdLine);
return through(function (this: through.ThroughStream, file: Vinyl) {
// give the file to the compiler
@ -86,10 +95,51 @@ export function create(
});
}
const result = (token: builder.CancellationToken) => createStream(token);
// TRANSPILE ONLY stream doing just TS to JS conversion
function createTranspileStream(): Readable & Writable {
return through(function (this: through.ThroughStream, file: Vinyl) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
if (!file.contents) {
return;
}
const out = ts.transpileModule(String(file.contents), {
compilerOptions: { ...cmdLine.options, declaration: false, sourceMap: false }
});
if (out.diagnostics) {
out.diagnostics.forEach(printDiagnostic);
}
const outFile = new Vinyl({
path: file.path.replace(/\.ts$/, '.js'),
cwd: file.cwd,
base: file.base,
contents: Buffer.from(out.outputText),
});
this.push(outFile);
logFn('Transpiled', file.path);
});
}
const result = (token: builder.CancellationToken) => {
return config.transplileOnly
? createTranspileStream()
: createCompileStream(token);
};
result.src = (opts?: { cwd?: string; base?: string }) => {
let _pos = 0;
let _fileNames = cmdLine.fileNames.slice(0);
const _fileNames = cmdLine.fileNames.slice(0);
return new class extends Readable {
constructor() {
super({ objectMode: true });

View file

@ -30,7 +30,7 @@ var collections;
}
collections.lookupOrInsert = lookupOrInsert;
function forEach(collection, callback) {
for (let key in collection) {
for (const key in collection) {
if (hasOwnProperty.call(collection, key)) {
callback({
key: key,

View file

@ -28,7 +28,7 @@ export module collections {
}
export function forEach<T>(collection: { [keys: string]: T }, callback: (entry: { key: string; value: T }) => void): void {
for (let key in collection) {
for (const key in collection) {
if (hasOwnProperty.call(collection, key)) {
callback({
key: key,

View file

@ -240,7 +240,7 @@ function _rreaddir(dirPath, prepend, result) {
}
}
function rreddir(dirPath) {
let result = [];
const result = [];
_rreaddir(dirPath, '', result);
return result;
}
@ -344,7 +344,7 @@ function createExternalLoaderConfig(webEndpoint, commit, quality) {
return undefined;
}
webEndpoint = webEndpoint + `/${quality}/${commit}`;
let nodePaths = acquireWebNodePaths();
const nodePaths = acquireWebNodePaths();
Object.keys(nodePaths).map(function (key, _) {
nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
});

View file

@ -306,7 +306,7 @@ function _rreaddir(dirPath: string, prepend: string, result: string[]): void {
}
export function rreddir(dirPath: string): string[] {
let result: string[] = [];
const result: string[] = [];
_rreaddir(dirPath, '', result);
return result;
}
@ -423,7 +423,7 @@ export function createExternalLoaderConfig(webEndpoint?: string, commit?: string
return undefined;
}
webEndpoint = webEndpoint + `/${quality}/${commit}`;
let nodePaths = acquireWebNodePaths();
const nodePaths = acquireWebNodePaths();
Object.keys(nodePaths).map(function (key, _) {
nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
});

View file

@ -43,14 +43,14 @@ export async function startClient(context: ExtensionContext, newLanguageClient:
const customDataSource = getCustomDataSource(context.subscriptions);
let documentSelector = ['css', 'scss', 'less'];
const documentSelector = ['css', 'scss', 'less'];
const formatterRegistrations: FormatterRegistration[] = documentSelector.map(languageId => ({
languageId, settingId: `${languageId}.format.enable`, provider: undefined
}));
// Options to control the language client
let clientOptions: LanguageClientOptions = {
const clientOptions: LanguageClientOptions = {
documentSelector,
synchronize: {
configurationSection: ['css', 'scss', 'less']
@ -98,7 +98,7 @@ export async function startClient(context: ExtensionContext, newLanguageClient:
};
// Create the language client and start the client.
let client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
const client = newLanguageClient('css', localize('cssserver.name', 'CSS Language Server'), clientOptions);
client.registerProposedFeatures();
await client.start();
@ -125,17 +125,17 @@ export async function startClient(context: ExtensionContext, newLanguageClient:
return languages.registerCompletionItemProvider(documentSelector, {
provideCompletionItems(doc: TextDocument, pos: Position) {
let lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
let match = lineUntilPos.match(regionCompletionRegExpr);
const lineUntilPos = doc.getText(new Range(new Position(pos.line, 0), pos));
const match = lineUntilPos.match(regionCompletionRegExpr);
if (match) {
let range = new Range(new Position(pos.line, match[1].length), pos);
let beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
const range = new Range(new Position(pos.line, match[1].length), pos);
const beginProposal = new CompletionItem('#region', CompletionItemKind.Snippet);
beginProposal.range = range; TextEdit.replace(range, '/* #region */');
beginProposal.insertText = new SnippetString('/* #region $1*/');
beginProposal.documentation = localize('folding.start', 'Folding Region Start');
beginProposal.filterText = match[2];
beginProposal.sortText = 'za';
let endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
const endProposal = new CompletionItem('#endregion', CompletionItemKind.Snippet);
endProposal.range = range;
endProposal.insertText = '/* #endregion */';
endProposal.documentation = localize('folding.end', 'Folding Region End');
@ -151,13 +151,13 @@ export async function startClient(context: ExtensionContext, newLanguageClient:
commands.registerCommand('_css.applyCodeAction', applyCodeAction);
function applyCodeAction(uri: string, documentVersion: number, edits: TextEdit[]) {
let textEditor = window.activeTextEditor;
const textEditor = window.activeTextEditor;
if (textEditor && textEditor.document.uri.toString() === uri) {
if (textEditor.document.version !== documentVersion) {
window.showInformationMessage(`CSS fix is outdated and can't be applied to the document.`);
}
textEditor.edit(mutator => {
for (let edit of edits) {
for (const edit of edits) {
mutator.replace(client.protocol2CodeConverter.asRange(edit.range), edit.newText);
}
}).then(success => {

View file

@ -18,10 +18,10 @@ export function getLanguageModelCache<T>(maxEntries: number, cleanupIntervalTime
let cleanupInterval: NodeJS.Timer | undefined = undefined;
if (cleanupIntervalTimeInSec > 0) {
cleanupInterval = setInterval(() => {
let cutoffTime = Date.now() - cleanupIntervalTimeInSec * 1000;
let uris = Object.keys(languageModels);
for (let uri of uris) {
let languageModelInfo = languageModels[uri];
const cutoffTime = Date.now() - cleanupIntervalTimeInSec * 1000;
const uris = Object.keys(languageModels);
for (const uri of uris) {
const languageModelInfo = languageModels[uri];
if (languageModelInfo.cTime < cutoffTime) {
delete languageModels[uri];
nModels--;
@ -32,14 +32,14 @@ export function getLanguageModelCache<T>(maxEntries: number, cleanupIntervalTime
return {
get(document: TextDocument): T {
let version = document.version;
let languageId = document.languageId;
let languageModelInfo = languageModels[document.uri];
const version = document.version;
const languageId = document.languageId;
const languageModelInfo = languageModels[document.uri];
if (languageModelInfo && languageModelInfo.version === version && languageModelInfo.languageId === languageId) {
languageModelInfo.cTime = Date.now();
return languageModelInfo.languageModel;
}
let languageModel = parse(document);
const languageModel = parse(document);
languageModels[document.uri] = { languageModel, version, languageId, cTime: Date.now() };
if (!languageModelInfo) {
nModels++;
@ -48,8 +48,8 @@ export function getLanguageModelCache<T>(maxEntries: number, cleanupIntervalTime
if (nModels === maxEntries) {
let oldestTime = Number.MAX_VALUE;
let oldestUri = null;
for (let uri in languageModels) {
let languageModelInfo = languageModels[uri];
for (const uri in languageModels) {
const languageModelInfo = languageModels[uri];
if (languageModelInfo.cTime < oldestTime) {
oldestUri = uri;
oldestTime = languageModelInfo.cTime;
@ -64,7 +64,7 @@ export function getLanguageModelCache<T>(maxEntries: number, cleanupIntervalTime
},
onDocumentRemoved(document: TextDocument) {
let uri = document.uri;
const uri = document.uri;
if (languageModels[uri]) {
delete languageModels[uri];
nModels--;

View file

@ -65,7 +65,7 @@ export interface RequestService {
export function getRequestService(handledSchemas: string[], connection: Connection, runtime: RuntimeEnvironment): RequestService {
const builtInHandlers: { [protocol: string]: RequestService | undefined } = {};
for (let protocol of handledSchemas) {
for (const protocol of handledSchemas) {
if (protocol === 'file') {
builtInHandlers[protocol] = runtime.file;
} else if (protocol === 'http' || protocol === 'https') {

View file

@ -19,13 +19,13 @@ export interface ItemDescription {
suite('Completions', () => {
let assertCompletion = function (completions: CompletionList, expected: ItemDescription, document: TextDocument, _offset: number) {
let matches = completions.items.filter(completion => {
const assertCompletion = function (completions: CompletionList, expected: ItemDescription, document: TextDocument, _offset: number) {
const matches = completions.items.filter(completion => {
return completion.label === expected.label;
});
assert.strictEqual(matches.length, 1, `${expected.label} should only existing once: Actual: ${completions.items.map(c => c.label).join(', ')}`);
let match = matches[0];
const match = matches[0];
if (expected.resultText && TextEdit.is(match.textEdit)) {
assert.strictEqual(TextDocument.applyEdits(document, [match.textEdit]), expected.resultText);
}
@ -47,21 +47,21 @@ suite('Completions', () => {
const context = getDocumentContext(testUri, workspaceFolders);
const stylesheet = cssLanguageService.parseStylesheet(document);
let list = await cssLanguageService.doComplete2(document, position, stylesheet, context);
const list = await cssLanguageService.doComplete2(document, position, stylesheet, context);
if (expected.count) {
assert.strictEqual(list.items.length, expected.count);
}
if (expected.items) {
for (let item of expected.items) {
for (const item of expected.items) {
assertCompletion(list, item, document, offset);
}
}
}
test('CSS url() Path completion', async function () {
let testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
let folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
const testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
const folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
await assertCompletions('html { background-image: url("./|")', {
items: [
@ -119,8 +119,8 @@ suite('Completions', () => {
});
test('CSS url() Path Completion - Unquoted url', async function () {
let testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
let folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
const testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
const folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
await assertCompletions('html { background-image: url(./|)', {
items: [
@ -148,8 +148,8 @@ suite('Completions', () => {
});
test('CSS @import Path completion', async function () {
let testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
let folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
const testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
const folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
await assertCompletions(`@import './|'`, {
items: [
@ -171,8 +171,8 @@ suite('Completions', () => {
* For SCSS, `@import 'foo';` can be used for importing partial file `_foo.scss`
*/
test('SCSS @import Path completion', async function () {
let testCSSUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
let folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
const testCSSUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
const folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
/**
* We are in a CSS file, so no special treatment for SCSS partial files
@ -184,7 +184,7 @@ suite('Completions', () => {
]
}, testCSSUri, folders);
let testSCSSUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/scss/main.scss')).toString();
const testSCSSUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/scss/main.scss')).toString();
await assertCompletions(`@import './|'`, {
items: [
{ label: '_foo.scss', resultText: `@import './foo'` }
@ -193,8 +193,8 @@ suite('Completions', () => {
});
test('Completion should ignore files/folders starting with dot', async function () {
let testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
let folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
const testUri = URI.file(path.resolve(__dirname, '../../test/pathCompletionFixtures/about/about.css')).toString();
const folders = [{ name: 'x', uri: URI.file(path.resolve(__dirname, '../../test')).toString() }];
await assertCompletions('html { background-image: url("../|")', {
count: 4

View file

@ -21,13 +21,13 @@ export interface ItemDescription {
suite('Links', () => {
const cssLanguageService = getCSSLanguageService({ fileSystemProvider: getNodeFSRequestService() });
let assertLink = function (links: DocumentLink[], expected: ItemDescription, document: TextDocument) {
let matches = links.filter(link => {
const assertLink = function (links: DocumentLink[], expected: ItemDescription, document: TextDocument) {
const matches = links.filter(link => {
return document.offsetAt(link.range.start) === expected.offset;
});
assert.strictEqual(matches.length, 1, `${expected.offset} should only existing once: Actual: ${links.map(l => document.offsetAt(l.range.start)).join(', ')}`);
let match = matches[0];
const match = matches[0];
assert.strictEqual(document.getText(match.range), expected.value);
assert.strictEqual(match.target, expected.target);
};
@ -45,11 +45,11 @@ suite('Links', () => {
const context = getDocumentContext(testUri, workspaceFolders);
const stylesheet = cssLanguageService.parseStylesheet(document);
let links = await cssLanguageService.findDocumentLinks2(document, stylesheet, context)!;
const links = await cssLanguageService.findDocumentLinks2(document, stylesheet, context)!;
assert.strictEqual(links.length, expected.length);
for (let item of expected) {
for (const item of expected) {
assertLink(links, item, document);
}
}
@ -60,8 +60,8 @@ suite('Links', () => {
test('url links', async function () {
let testUri = getTestResource('about.css');
let folders = [{ name: 'x', uri: getTestResource('') }];
const testUri = getTestResource('about.css');
const folders = [{ name: 'x', uri: getTestResource('') }];
await assertLinks('html { background-image: url("hello.html|")',
[{ offset: 29, value: '"hello.html"', target: getTestResource('hello.html') }], testUri, folders
@ -70,8 +70,8 @@ suite('Links', () => {
test('node module resolving', async function () {
let testUri = getTestResource('about.css');
let folders = [{ name: 'x', uri: getTestResource('') }];
const testUri = getTestResource('about.css');
const folders = [{ name: 'x', uri: getTestResource('') }];
await assertLinks('html { background-image: url("~foo/hello.html|")',
[{ offset: 29, value: '"~foo/hello.html"', target: getTestResource('node_modules/foo/hello.html') }], testUri, folders
@ -80,8 +80,8 @@ suite('Links', () => {
test('node module subfolder resolving', async function () {
let testUri = getTestResource('subdir/about.css');
let folders = [{ name: 'x', uri: getTestResource('') }];
const testUri = getTestResource('subdir/about.css');
const folders = [{ name: 'x', uri: getTestResource('') }];
await assertLinks('html { background-image: url("~foo/hello.html|")',
[{ offset: 29, value: '"~foo/hello.html"', target: getTestResource('node_modules/foo/hello.html') }], testUri, folders

View file

@ -10,7 +10,7 @@ import { Utils, URI } from 'vscode-uri';
export function getDocumentContext(documentUri: string, workspaceFolders: WorkspaceFolder[]): DocumentContext {
function getRootFolder(): string | undefined {
for (let folder of workspaceFolders) {
for (const folder of workspaceFolders) {
let folderURI = folder.uri;
if (!endsWith(folderURI, '/')) {
folderURI = folderURI + '/';
@ -25,7 +25,7 @@ export function getDocumentContext(documentUri: string, workspaceFolders: Worksp
return {
resolveReference: (ref: string, base = documentUri) => {
if (ref[0] === '/') { // resolve absolute path against the current workspace folder
let folderUri = getRootFolder();
const folderUri = getRootFolder();
if (folderUri) {
return folderUri + ref.substr(1);
}

View file

@ -8,7 +8,7 @@ import { RuntimeEnvironment } from '../cssServer';
export function formatError(message: string, err: any): string {
if (err instanceof Error) {
let error = <Error>err;
const error = <Error>err;
return `${message}: ${error.message}\n${error.stack}`;
} else if (typeof err === 'string') {
return `${message}: ${err}`;

View file

@ -21,7 +21,7 @@ export function startsWith(haystack: string, needle: string): boolean {
* Determines if haystack ends with needle.
*/
export function endsWith(haystack: string, needle: string): boolean {
let diff = haystack.length - needle.length;
const diff = haystack.length - needle.length;
if (diff > 0) {
return haystack.lastIndexOf(needle) === diff;
} else if (diff === 0) {

View file

@ -243,7 +243,7 @@ const createServerInner = async (ipcAddress: string) => {
const createServerInstance = (ipcAddress: string) =>
new Promise<Server>((resolve, reject) => {
const s = createServer(socket => {
let data: Buffer[] = [];
const data: Buffer[] = [];
socket.on('data', async chunk => {
if (chunk[chunk.length - 1] !== 0) {
// terminated with NUL byte
@ -392,7 +392,7 @@ async function getIpcAddress(context: vscode.ExtensionContext) {
}
function getJsDebugSettingKey() {
let o: { [key: string]: unknown } = {};
const o: { [key: string]: unknown } = {};
const config = vscode.workspace.getConfiguration(SETTING_SECTION);
for (const setting of SETTINGS_CAUSE_REFRESH) {
o[setting] = config.get(setting);

View file

@ -57,7 +57,7 @@ class ServerReadyDetector extends vscode.Disposable {
}
static stop(session: vscode.DebugSession): void {
let detector = ServerReadyDetector.detectors.get(session);
const detector = ServerReadyDetector.detectors.get(session);
if (detector) {
ServerReadyDetector.detectors.delete(session);
detector.dispose();
@ -65,7 +65,7 @@ class ServerReadyDetector extends vscode.Disposable {
}
static rememberShellPid(session: vscode.DebugSession, pid: number) {
let detector = ServerReadyDetector.detectors.get(session);
const detector = ServerReadyDetector.detectors.get(session);
if (detector) {
detector.shellPid = pid;
}
@ -77,7 +77,7 @@ class ServerReadyDetector extends vscode.Disposable {
// first find the detector with a matching pid
const pid = await e.terminal.processId;
for (let [, detector] of this.detectors) {
for (const [, detector] of this.detectors) {
if (detector.shellPid === pid) {
detector.detectPattern(e.data);
return;
@ -85,7 +85,7 @@ class ServerReadyDetector extends vscode.Disposable {
}
// if none found, try all detectors until one matches
for (let [, detector] of this.detectors) {
for (const [, detector] of this.detectors) {
if (detector.detectPattern(e.data)) {
return;
}

View file

@ -49,7 +49,7 @@ export class DefaultCompletionItemProvider implements vscode.CompletionItemProvi
const mappedLanguages = getMappingForIncludedLanguages();
const isSyntaxMapped = mappedLanguages[document.languageId] ? true : false;
let emmetMode = getEmmetMode((isSyntaxMapped ? mappedLanguages[document.languageId] : document.languageId), mappedLanguages, excludedLanguages);
const emmetMode = getEmmetMode((isSyntaxMapped ? mappedLanguages[document.languageId] : document.languageId), mappedLanguages, excludedLanguages);
if (!emmetMode
|| emmetConfig['showExpandedAbbreviation'] === 'never'
@ -135,7 +135,7 @@ export class DefaultCompletionItemProvider implements vscode.CompletionItemProvi
const offset = document.offsetAt(position);
if (isStyleSheet(document.languageId) && context.triggerKind !== vscode.CompletionTriggerKind.TriggerForIncompleteCompletions) {
validateLocation = true;
let usePartialParsing = vscode.workspace.getConfiguration('emmet')['optimizeStylesheetParsing'] === true;
const usePartialParsing = vscode.workspace.getConfiguration('emmet')['optimizeStylesheetParsing'] === true;
rootNode = usePartialParsing && document.lineCount > 1000 ? parsePartialStylesheet(document, position) : <Stylesheet>getRootNode(document, true);
if (!rootNode) {
return;
@ -152,8 +152,8 @@ export class DefaultCompletionItemProvider implements vscode.CompletionItemProvi
if (!rootNode) {
return;
}
let flatNode = getFlatNode(rootNode, offset, true);
let embeddedCssNode = getEmbeddedCssNodeIfAny(document, flatNode, position);
const flatNode = getFlatNode(rootNode, offset, true);
const embeddedCssNode = getEmbeddedCssNodeIfAny(document, flatNode, position);
currentNode = getFlatNode(embeddedCssNode, offset, true);
}
@ -167,7 +167,7 @@ export class DefaultCompletionItemProvider implements vscode.CompletionItemProvi
// Check for document symbols in js/ts/jsx/tsx and avoid triggering emmet for abbreviations of the form symbolName.sometext
// Presence of > or * or + in the abbreviation denotes valid abbreviation that should trigger emmet
if (!isStyleSheet(syntax) && (document.languageId === 'javascript' || document.languageId === 'javascriptreact' || document.languageId === 'typescript' || document.languageId === 'typescriptreact')) {
let abbreviation: string = extractAbbreviationResults.abbreviation;
const abbreviation: string = extractAbbreviationResults.abbreviation;
// For the second condition, we don't want abbreviations that have [] characters but not ='s in them to expand
// In turn, users must explicitly expand abbreviations of the form Component[attr1 attr2], but it means we don't try to expand a[i].
if (abbreviation.startsWith('this.') || /\[[^\]=]*\]/.test(abbreviation)) {
@ -194,14 +194,14 @@ export class DefaultCompletionItemProvider implements vscode.CompletionItemProvi
}
}
let newItems: vscode.CompletionItem[] = [];
const newItems: vscode.CompletionItem[] = [];
if (result && result.items) {
result.items.forEach((item: any) => {
let newItem = new vscode.CompletionItem(item.label);
const newItem = new vscode.CompletionItem(item.label);
newItem.documentation = item.documentation;
newItem.detail = item.detail;
newItem.insertText = new vscode.SnippetString(item.textEdit.newText);
let oldrange = item.textEdit.range;
const oldrange = item.textEdit.range;
newItem.range = new vscode.Range(oldrange.start.line, oldrange.start.character, oldrange.end.line, oldrange.end.character);
newItem.filterText = item.filterText;

View file

@ -12,9 +12,9 @@ export function fetchEditPoint(direction: string): void {
}
const editor = vscode.window.activeTextEditor;
let newSelections: vscode.Selection[] = [];
const newSelections: vscode.Selection[] = [];
editor.selections.forEach(selection => {
let updatedSelection = direction === 'next' ? nextEditPoint(selection, editor) : prevEditPoint(selection, editor);
const updatedSelection = direction === 'next' ? nextEditPoint(selection, editor) : prevEditPoint(selection, editor);
newSelections.push(updatedSelection);
});
editor.selections = newSelections;
@ -23,7 +23,7 @@ export function fetchEditPoint(direction: string): void {
function nextEditPoint(selection: vscode.Selection, editor: vscode.TextEditor): vscode.Selection {
for (let lineNum = selection.anchor.line; lineNum < editor.document.lineCount; lineNum++) {
let updatedSelection = findEditPoint(lineNum, editor, selection.anchor, 'next');
const updatedSelection = findEditPoint(lineNum, editor, selection.anchor, 'next');
if (updatedSelection) {
return updatedSelection;
}
@ -33,7 +33,7 @@ function nextEditPoint(selection: vscode.Selection, editor: vscode.TextEditor):
function prevEditPoint(selection: vscode.Selection, editor: vscode.TextEditor): vscode.Selection {
for (let lineNum = selection.anchor.line; lineNum >= 0; lineNum--) {
let updatedSelection = findEditPoint(lineNum, editor, selection.anchor, 'prev');
const updatedSelection = findEditPoint(lineNum, editor, selection.anchor, 'prev');
if (updatedSelection) {
return updatedSelection;
}
@ -43,7 +43,7 @@ function prevEditPoint(selection: vscode.Selection, editor: vscode.TextEditor):
function findEditPoint(lineNum: number, editor: vscode.TextEditor, position: vscode.Position, direction: string): vscode.Selection | undefined {
let line = editor.document.lineAt(lineNum);
const line = editor.document.lineAt(lineNum);
let lineContent = line.text;
if (lineNum !== position.line && line.isEmptyOrWhitespace && lineContent.length) {
@ -53,8 +53,8 @@ function findEditPoint(lineNum: number, editor: vscode.TextEditor, position: vsc
if (lineNum === position.line && direction === 'prev') {
lineContent = lineContent.substr(0, position.character);
}
let emptyAttrIndex = direction === 'next' ? lineContent.indexOf('""', lineNum === position.line ? position.character : 0) : lineContent.lastIndexOf('""');
let emptyTagIndex = direction === 'next' ? lineContent.indexOf('><', lineNum === position.line ? position.character : 0) : lineContent.lastIndexOf('><');
const emptyAttrIndex = direction === 'next' ? lineContent.indexOf('""', lineNum === position.line ? position.character : 0) : lineContent.lastIndexOf('""');
const emptyTagIndex = direction === 'next' ? lineContent.indexOf('><', lineNum === position.line ? position.character : 0) : lineContent.lastIndexOf('><');
let winner = -1;

View file

@ -161,8 +161,8 @@ const languageMappingForCompletionProviders: Map<string, string> = new Map<strin
const completionProvidersMapping: Map<string, vscode.Disposable> = new Map<string, vscode.Disposable>();
function registerCompletionProviders(context: vscode.ExtensionContext) {
let completionProvider = new DefaultCompletionItemProvider();
let includedLanguages = getMappingForIncludedLanguages();
const completionProvider = new DefaultCompletionItemProvider();
const includedLanguages = getMappingForIncludedLanguages();
Object.keys(includedLanguages).forEach(language => {
if (languageMappingForCompletionProviders.has(language) && languageMappingForCompletionProviders.get(language) === includedLanguages[language]) {

View file

@ -21,7 +21,7 @@ export function incrementDecrement(delta: number): Thenable<boolean> | undefined
return editor.edit(editBuilder => {
editor.selections.forEach(selection => {
let rangeToReplace = locate(editor.document, selection.isReversed ? selection.anchor : selection.active);
const rangeToReplace = locate(editor.document, selection.isReversed ? selection.anchor : selection.active);
if (!rangeToReplace) {
return;
}
@ -40,7 +40,7 @@ export function incrementDecrement(delta: number): Thenable<boolean> | undefined
*/
export function update(numString: string, delta: number): string {
let m: RegExpMatchArray | null;
let decimals = (m = numString.match(/\.(\d+)$/)) ? m[1].length : 1;
const decimals = (m = numString.match(/\.(\d+)$/)) ? m[1].length : 1;
let output = String((parseFloat(numString) + delta).toFixed(decimals)).replace(/\.0+$/, '');
if (m = numString.match(/^\-?(0\d+)/)) {

View file

@ -20,7 +20,7 @@ export function matchTag() {
return;
}
let updatedSelections: vscode.Selection[] = [];
const updatedSelections: vscode.Selection[] = [];
editor.selections.forEach(selection => {
const updatedSelection = getUpdatedSelections(document, rootNode, selection.start);
if (updatedSelection) {

View file

@ -19,7 +19,7 @@ export function removeTag() {
return;
}
let finalRangesToRemove = Array.from(editor.selections).reverse()
const finalRangesToRemove = Array.from(editor.selections).reverse()
.reduce<vscode.Range[]>((prev, selection) =>
prev.concat(getRangesToRemove(editor.document, rootNode, selection)), []);
@ -68,7 +68,7 @@ function getRangesToRemove(document: vscode.TextDocument, rootNode: HtmlFlatNode
}
}
let rangesToRemove = [];
const rangesToRemove = [];
if (openTagRange) {
rangesToRemove.push(openTagRange);
if (closeTagRange) {

View file

@ -21,7 +21,7 @@ export function fetchSelectItem(direction: string): void {
return;
}
let newSelections: vscode.Selection[] = [];
const newSelections: vscode.Selection[] = [];
editor.selections.forEach(selection => {
const selectionStart = selection.isReversed ? selection.active : selection.anchor;
const selectionEnd = selection.isReversed ? selection.anchor : selection.active;

View file

@ -28,7 +28,7 @@ export function nextItemStylesheet(document: vscode.TextDocument, startPosition:
if (currentNode.type === 'property' &&
startOffset >= (<Property>currentNode).valueToken.start &&
endOffset <= (<Property>currentNode).valueToken.end) {
let singlePropertyValue = getSelectionFromProperty(document, currentNode, startOffset, endOffset, false, 'next');
const singlePropertyValue = getSelectionFromProperty(document, currentNode, startOffset, endOffset, false, 'next');
if (singlePropertyValue) {
return singlePropertyValue;
}
@ -77,7 +77,7 @@ export function prevItemStylesheet(document: vscode.TextDocument, startPosition:
if (currentNode.type === 'property' &&
startOffset >= (<Property>currentNode).valueToken.start &&
endOffset <= (<Property>currentNode).valueToken.end) {
let singlePropertyValue = getSelectionFromProperty(document, currentNode, startOffset, endOffset, false, 'prev');
const singlePropertyValue = getSelectionFromProperty(document, currentNode, startOffset, endOffset, false, 'prev');
if (singlePropertyValue) {
return singlePropertyValue;
}
@ -115,7 +115,7 @@ function getSelectionFromProperty(document: vscode.TextDocument, node: Node | un
}
const propertyNode = <Property>node;
let propertyValue = propertyNode.valueToken.stream.substring(propertyNode.valueToken.start, propertyNode.valueToken.end);
const propertyValue = propertyNode.valueToken.stream.substring(propertyNode.valueToken.start, propertyNode.valueToken.end);
selectFullValue = selectFullValue ||
(direction === 'prev' && selectionStart === propertyNode.valueToken.start && selectionEnd < propertyNode.valueToken.end);
@ -144,7 +144,7 @@ function getSelectionFromProperty(document: vscode.TextDocument, node: Node | un
}
let [newSelectionStartOffset, newSelectionEndOffset] = direction === 'prev' ? findPrevWord(propertyValue, pos) : findNextWord(propertyValue, pos);
const [newSelectionStartOffset, newSelectionEndOffset] = direction === 'prev' ? findPrevWord(propertyValue, pos) : findNextWord(propertyValue, pos);
if (!newSelectionStartOffset && !newSelectionEndOffset) {
return;
}

View file

@ -62,13 +62,13 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(htmlContents, '.html', (editor, _) => {
editor.selections = [new Selection(1, 5, 1, 5)];
let expectedNextEditPoints: [number, number][] = [[4, 16], [6, 8], [10, 2], [10, 2]];
const expectedNextEditPoints: [number, number][] = [[4, 16], [6, 8], [10, 2], [10, 2]];
expectedNextEditPoints.forEach(([line, col]) => {
fetchEditPoint('next');
testSelection(editor.selection, col, line);
});
let expectedPrevEditPoints = [[6, 8], [4, 16], [4, 16]];
const expectedPrevEditPoints = [[6, 8], [4, 16], [4, 16]];
expectedPrevEditPoints.forEach(([line, col]) => {
fetchEditPoint('prev');
testSelection(editor.selection, col, line);
@ -82,7 +82,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(htmlContents, '.html', (editor, _) => {
editor.selections = [new Selection(2, 2, 2, 2)];
let expectedNextItemPoints: [number, number, number][] = [
const expectedNextItemPoints: [number, number, number][] = [
[2, 1, 5], // html
[2, 6, 15], // lang="en"
[2, 12, 14], // en
@ -141,7 +141,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(templateContents, '.html', (editor, _) => {
editor.selections = [new Selection(2, 2, 2, 2)];
let expectedNextItemPoints: [number, number, number][] = [
const expectedNextItemPoints: [number, number, number][] = [
[2, 2, 5], // div
[2, 6, 20], // class="header"
[2, 13, 19], // header
@ -170,7 +170,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(cssContents, '.css', (editor, _) => {
editor.selections = [new Selection(0, 0, 0, 0)];
let expectedNextItemPoints: [number, number, number][] = [
const expectedNextItemPoints: [number, number, number][] = [
[1, 0, 4], // .boo
[2, 1, 19], // margin: 20px 10px;
[2, 9, 18], // 20px 10px
@ -201,7 +201,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(scssContents, '.scss', (editor, _) => {
editor.selections = [new Selection(0, 0, 0, 0)];
let expectedNextItemPoints: [number, number, number][] = [
const expectedNextItemPoints: [number, number, number][] = [
[1, 0, 4], // .boo
[2, 1, 19], // margin: 20px 10px;
[2, 9, 18], // 20px 10px
@ -232,7 +232,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(htmlContents, 'html', (editor, _) => {
editor.selections = [new Selection(14, 6, 14, 10)];
let expectedBalanceOutRanges: [number, number, number, number][] = [
const expectedBalanceOutRanges: [number, number, number, number][] = [
[14, 3, 14, 32], // <li class="item1">Item 1</li>
[13, 23, 16, 2], // inner contents of <ul class="nav main">
[13, 2, 16, 7], // outer contents of <ul class="nav main">
@ -249,7 +249,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
});
editor.selections = [new Selection(12, 7, 12, 7)];
let expectedBalanceInRanges: [number, number, number, number][] = [
const expectedBalanceInRanges: [number, number, number, number][] = [
[12, 21, 17, 1], // inner contents of <div class="header">
[13, 2, 16, 7], // outer contents of <ul class="nav main">
[13, 23, 16, 2], // inner contents of <ul class="nav main">
@ -269,7 +269,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(htmlContents, 'html', (editor, _) => {
editor.selections = [new Selection(15, 6, 15, 10)];
let expectedBalanceOutRanges: [number, number, number, number][] = [
const expectedBalanceOutRanges: [number, number, number, number][] = [
[15, 3, 15, 32], // <li class="item1">Item 2</li>
[13, 23, 16, 2], // inner contents of <ul class="nav main">
[13, 2, 16, 7], // outer contents of <ul class="nav main">
@ -327,7 +327,7 @@ suite('Tests for Next/Previous Select/Edit point and Balance actions', () => {
return withRandomFileEditor(htmlTemplate, 'html', (editor, _) => {
editor.selections = [new Selection(5, 24, 5, 24)];
let expectedBalanceOutRanges: [number, number, number, number][] = [
const expectedBalanceOutRanges: [number, number, number, number][] = [
[5, 20, 5, 26], // <li class="item1">``Item 2''</li>
[5, 2, 5, 31], // ``<li class="item1">Item 2</li>''
[3, 22, 6, 1], // inner contents of ul

View file

@ -30,12 +30,12 @@ p {
} p
`;
return withRandomFileEditor(cssContents, '.css', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(3, 18, 3, 19), // Same line after block comment
new vscode.Range(4, 1, 4, 2), // p after block comment
new vscode.Range(5, 1, 5, 3) // p. after block comment
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(1, 0, 1, 1), // Selector
new vscode.Range(2, 9, 2, 10), // Property value
new vscode.Range(3, 3, 3, 5), // dn inside block comment
@ -65,7 +65,7 @@ dn {
} bg
`;
return withRandomFileEditor(sassContents, '.scss', (_, doc) => {
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(1, 0, 1, 4), // Selector
new vscode.Range(2, 3, 2, 7), // Line commented selector
new vscode.Range(3, 3, 3, 7), // Block commented selector
@ -94,12 +94,12 @@ comment */
} p
`;
return withRandomFileEditor(cssContents, '.css', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(7, 18, 7, 19), // Same line after block comment
new vscode.Range(8, 1, 8, 2), // p after block comment
new vscode.Range(9, 1, 9, 3) // p. after block comment
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(1, 2, 1, 3), // Selector
new vscode.Range(3, 3, 3, 4), // Inside multiline comment
new vscode.Range(5, 0, 5, 1), // Opening Brace
@ -133,13 +133,13 @@ comment */
}}}
`;
return withRandomFileEditor(sassContents, '.scss', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(2, 1, 2, 2), // Inside a ruleset before errors
new vscode.Range(3, 1, 3, 2), // Inside a ruleset after no serious error
new vscode.Range(7, 1, 7, 2), // @ inside a so far well structured ruleset
new vscode.Range(9, 2, 9, 3), // @ inside a so far well structured nested ruleset
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(4, 4, 4, 5), // p inside ruleset without proper selector
new vscode.Range(6, 3, 6, 4) // In selector
];
@ -158,14 +158,14 @@ comment */
.foo{dn}.bar{.boo{dn}dn}.comd{/*{dn*/p{div{dn}} }.foo{.other{dn}} dn
`;
return withRandomFileEditor(sassContents, '.scss', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(1, 5, 1, 7), // Inside a ruleset
new vscode.Range(1, 18, 1, 20), // Inside a nested ruleset
new vscode.Range(1, 21, 1, 23), // Inside ruleset after nested one.
new vscode.Range(1, 43, 1, 45), // Inside nested ruleset after comment
new vscode.Range(1, 61, 1, 63) // Inside nested ruleset
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(1, 3, 1, 4), // In foo selector
new vscode.Range(1, 10, 1, 11), // In bar selector
new vscode.Range(1, 15, 1, 16), // In boo selector
@ -197,11 +197,11 @@ p.#{dn} {
}
`;
return withRandomFileEditor(sassContents, '.scss', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(2, 1, 2, 4), // p.3 inside a ruleset whose selector uses interpolation
new vscode.Range(4, 1, 4, 3) // dn inside ruleset after property with variable
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(1, 0, 1, 1), // In p in selector
new vscode.Range(1, 2, 1, 3), // In # in selector
new vscode.Range(1, 4, 1, 6), // In dn inside variable in selector
@ -237,13 +237,13 @@ ment */{
}
`;
return withRandomFileEditor(sassContents, '.scss', (_, doc) => {
let rangesForEmmet = [
const rangesForEmmet = [
new vscode.Range(2, 14, 2, 21), // brs6-2p with a block commented line comment ('/* */' overrides '//')
new vscode.Range(3, 1, 3, 3), // dn after a line with combined comments inside a ruleset
new vscode.Range(9, 1, 9, 4), // m10 inside ruleset whose selector is before a comment
new vscode.Range(12, 1, 12, 5) // op3 inside a ruleset with commented extra braces
];
let rangesNotEmmet = [
const rangesNotEmmet = [
new vscode.Range(2, 4, 2, 5), // In p inside block comment
new vscode.Range(2, 9, 2, 10), // In p inside block comment and after line comment
new vscode.Range(6, 3, 6, 4) // In c inside block comment

View file

@ -43,7 +43,7 @@ suite('Tests for Emmet actions on html tags', () => {
</div>
`;
let contentsWithTemplate = `
const contentsWithTemplate = `
<script type="text/template">
<ul>
<li><span>Hello</span></li>
@ -353,7 +353,7 @@ suite('Tests for Emmet actions on html tags', () => {
});
test('match tag with template scripts', () => {
let templateScript = `
const templateScript = `
<script type="text/template">
<div>
Hello

View file

@ -27,7 +27,7 @@ export function toggleComment(): Thenable<boolean> | undefined {
}
return editor.edit(editBuilder => {
let allEdits: vscode.TextEdit[][] = [];
const allEdits: vscode.TextEdit[][] = [];
Array.from(editor.selections).reverse().forEach(selection => {
const edits = isStyleSheet(editor.document.languageId) ? toggleCommentStylesheet(editor.document, selection, <Stylesheet>rootNode) : toggleCommentHTML(editor.document, selection, rootNode!);
if (edits.length > 0) {
@ -37,7 +37,7 @@ export function toggleComment(): Thenable<boolean> | undefined {
// Apply edits in order so we can skip nested ones.
allEdits.sort((arr1, arr2) => {
let result = arr1[0].range.start.line - arr2[0].range.start.line;
const result = arr1[0].range.start.line - arr2[0].range.start.line;
return result === 0 ? arr1[0].range.start.character - arr2[0].range.start.character : result;
});
let lastEditPosition = new vscode.Position(0, 0);
@ -76,7 +76,7 @@ function toggleCommentHTML(document: vscode.TextDocument, selection: vscode.Sele
return toggleCommentStylesheet(document, selection, cssRootNode);
}
let allNodes: Node[] = getNodesInBetween(startNode, endNode);
const allNodes: Node[] = getNodesInBetween(startNode, endNode);
let edits: vscode.TextEdit[] = [];
allNodes.forEach(node => {
@ -132,8 +132,8 @@ function toggleCommentStylesheet(document: vscode.TextDocument, selection: vscod
}
// Uncomment the comments that intersect with the selection.
let rangesToUnComment: vscode.Range[] = [];
let edits: vscode.TextEdit[] = [];
const rangesToUnComment: vscode.Range[] = [];
const edits: vscode.TextEdit[] = [];
rootNode.comments.forEach(comment => {
const commentRange = offsetRangeToVsRange(document, comment.start, comment.end);
if (selection.intersection(commentRange)) {

View file

@ -193,7 +193,7 @@ function updateHTMLTag(editor: TextEditor, node: HtmlNode, width: number, height
const quote = getAttributeQuote(editor, srcAttr);
const endOfAttributes = node.attributes[node.attributes.length - 1].end;
let edits: TextEdit[] = [];
const edits: TextEdit[] = [];
let textToAdd = '';
if (!widthAttr) {
@ -226,7 +226,7 @@ function updateCSSNode(editor: TextEditor, srcProp: Property, width: number, hei
const separator = srcProp.separator || ': ';
const before = getPropertyDelimitor(editor, srcProp);
let edits: TextEdit[] = [];
const edits: TextEdit[] = [];
if (!srcProp.terminatorToken) {
edits.push(new TextEdit(offsetRangeToVsRange(document, srcProp.end, srcProp.end), ';'));
}

View file

@ -54,7 +54,7 @@ export async function updateTag(tagName: string | undefined): Promise<boolean |
}
function getRangesFromNode(node: HtmlFlatNode, document: vscode.TextDocument): TagRange[] {
let ranges: TagRange[] = [];
const ranges: TagRange[] = [];
if (node.open) {
const start = document.positionAt(node.open.start);
ranges.push({

View file

@ -58,7 +58,7 @@ export function updateEmmetExtensionsPath(forceRefresh: boolean = false) {
*/
export function migrateEmmetExtensionsPath() {
// Get the detail info of emmet.extensionsPath setting
let config = vscode.workspace.getConfiguration().inspect('emmet.extensionsPath');
const config = vscode.workspace.getConfiguration().inspect('emmet.extensionsPath');
// Update Global setting if the value type is string or the value is null
if (typeof config?.globalValue === 'string') {
@ -100,12 +100,12 @@ export const LANGUAGE_MODES: { [id: string]: string[] } = {
};
export function isStyleSheet(syntax: string): boolean {
let stylesheetSyntaxes = ['css', 'scss', 'sass', 'less', 'stylus'];
const stylesheetSyntaxes = ['css', 'scss', 'sass', 'less', 'stylus'];
return stylesheetSyntaxes.includes(syntax);
}
export function validate(allowStylesheet: boolean = true): boolean {
let editor = vscode.window.activeTextEditor;
const editor = vscode.window.activeTextEditor;
if (!editor) {
vscode.window.showInformationMessage('No editor is active');
return false;
@ -197,7 +197,7 @@ export function parsePartialStylesheet(document: vscode.TextDocument, position:
function findOpeningCommentBeforePosition(pos: number): number | undefined {
const text = document.getText().substring(0, pos);
let offset = text.lastIndexOf('/*');
const offset = text.lastIndexOf('/*');
if (offset === -1) {
return;
}

View file

@ -338,7 +338,7 @@ export class ExtensionLinter {
if (!hasScheme && !info.hasHttpsRepository) {
const range = new Range(document.positionAt(begin), document.positionAt(end));
let message = (() => {
const message = (() => {
switch (context) {
case Context.ICON: return relativeIconUrlRequiresHttpsRepository;
case Context.BADGE: return relativeBadgeUrlRequiresHttpsRepository;
@ -367,7 +367,7 @@ export class ExtensionLinter {
function parseUri(src: string, base?: string, retry: boolean = true): Uri | null {
try {
let url = new URL(src, base);
const url = new URL(src, base);
return Uri.parse(url.toString());
} catch (err) {
if (retry) {

View file

@ -13,6 +13,7 @@ module.exports = withDefaults({
context: __dirname,
entry: {
main: './src/main.ts',
['askpass-main']: './src/askpass-main.ts'
['askpass-main']: './src/askpass-main.ts',
['git-editor-main']: './src/git-editor-main.ts'
}
});

View file

@ -14,6 +14,7 @@
"contribMergeEditorToolbar",
"contribViewsWelcome",
"scmActionButton",
"scmInput",
"scmSelectedProvider",
"scmValidation",
"timeline"
@ -213,83 +214,99 @@
"command": "git.commit",
"title": "%command.commit%",
"category": "Git",
"icon": "$(check)"
"icon": "$(check)",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStaged",
"title": "%command.commitStaged%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitEmpty",
"title": "%command.commitEmpty%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStagedSigned",
"title": "%command.commitStagedSigned%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStagedAmend",
"title": "%command.commitStagedAmend%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAll",
"title": "%command.commitAll%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAllSigned",
"title": "%command.commitAllSigned%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAllAmend",
"title": "%command.commitAllAmend%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitNoVerify",
"title": "%command.commitNoVerify%",
"category": "Git",
"icon": "$(check)"
"icon": "$(check)",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStagedNoVerify",
"title": "%command.commitStagedNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitEmptyNoVerify",
"title": "%command.commitEmptyNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStagedSignedNoVerify",
"title": "%command.commitStagedSignedNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitStagedAmendNoVerify",
"title": "%command.commitStagedAmendNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAllNoVerify",
"title": "%command.commitAllNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAllSignedNoVerify",
"title": "%command.commitAllSignedNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.commitAllAmendNoVerify",
"title": "%command.commitAllAmendNoVerify%",
"category": "Git"
"category": "Git",
"enablement": "!commitInProgress"
},
{
"command": "git.restoreCommitTemplate",
@ -2013,6 +2030,18 @@
"scope": "machine",
"description": "%config.defaultCloneDirectory%"
},
"git.useEditorAsCommitInput": {
"type": "boolean",
"scope": "resource",
"description": "%config.useEditorAsCommitInput%",
"default": false
},
"git.verboseCommit": {
"type": "boolean",
"scope": "resource",
"markdownDescription": "%config.verboseCommit%",
"default": false
},
"git.enableSmartCommit": {
"type": "boolean",
"scope": "resource",

View file

@ -140,6 +140,8 @@
"config.ignoreLimitWarning": "Ignores the warning when there are too many changes in a repository.",
"config.ignoreRebaseWarning": "Ignores the warning when it looks like the branch might have been rebased when pulling.",
"config.defaultCloneDirectory": "The default location to clone a git repository.",
"config.useEditorAsCommitInput": "Use an editor to author the commit message.",
"config.verboseCommit": "Enable verbose output when `#git.useEditorAsCommitInput#` is enabled.",
"config.enableSmartCommit": "Commit all changes when there are no staged changes.",
"config.smartCommitChanges": "Control which changes are automatically staged by Smart Commit.",
"config.smartCommitChanges.all": "Automatically stage all changes.",

View file

@ -137,6 +137,8 @@ export interface CommitOptions {
empty?: boolean;
noVerify?: boolean;
requireUserConfig?: boolean;
useEditor?: boolean;
verbose?: boolean;
}
export interface FetchOptions {
@ -336,4 +338,5 @@ export const enum GitErrorCodes {
PatchDoesNotApply = 'PatchDoesNotApply',
NoPathFound = 'NoPathFound',
UnknownPath = 'UnknownPath',
EmptyCommitMessage = 'EmptyCommitMessage'
}
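
For consumers of the Git extension API, the new options flow through Repository.commit roughly as follows. This is a sketch only: it assumes the GitExtension/API typings from this git.d.ts are available to the caller, and acquiring the API this way is the usual pattern rather than part of this diff.

import { extensions } from 'vscode';
import { GitExtension } from './api/git';

// Sketch: commit through the built-in Git extension using the new options.
async function commitUsingEditor(): Promise<void> {
	const gitExtension = extensions.getExtension<GitExtension>('vscode.git')?.exports;
	const repository = gitExtension?.getAPI(1).repositories[0];
	// The message is left empty: with useEditor set, git opens the commit
	// message in an editor instead of taking it from the input box.
	await repository?.commit('', { useEditor: true, verbose: true });
}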

View file

@ -6,9 +6,8 @@
import { window, InputBoxOptions, Uri, Disposable, workspace } from 'vscode';
import { IDisposable, EmptyDisposable, toDisposable } from './util';
import * as path from 'path';
import { IIPCHandler, IIPCServer, createIPCServer } from './ipc/ipcServer';
import { IIPCHandler, IIPCServer } from './ipc/ipcServer';
import { CredentialsProvider, Credentials } from './api/git';
import { OutputChannelLogger } from './log';
export class Askpass implements IIPCHandler {
@ -16,16 +15,7 @@ export class Askpass implements IIPCHandler {
private cache = new Map<string, Credentials>();
private credentialsProviders = new Set<CredentialsProvider>();
static async create(outputChannelLogger: OutputChannelLogger, context?: string): Promise<Askpass> {
try {
return new Askpass(await createIPCServer(context));
} catch (err) {
outputChannelLogger.logError(`Failed to create git askpass IPC: ${err}`);
return new Askpass();
}
}
private constructor(private ipc?: IIPCServer) {
constructor(private ipc?: IIPCServer) {
if (ipc) {
this.disposable = ipc.registerHandler('askpass', this);
}
@ -80,7 +70,7 @@ export class Askpass implements IIPCHandler {
};
}
let env: { [key: string]: string } = {
const env: { [key: string]: string } = {
...this.ipc.getEnv(),
VSCODE_GIT_ASKPASS_NODE: process.execPath,
VSCODE_GIT_ASKPASS_EXTRA_ARGS: (process.versions['electron'] && process.versions['microsoft-build']) ? '--ms-enable-electron-run-as-node' : '',

View file

@ -418,8 +418,8 @@ export class CommandCenter {
type InputData = { uri: Uri; detail?: string; description?: string };
const mergeUris = toMergeUris(uri);
let input1: InputData = { uri: mergeUris.ours };
let input2: InputData = { uri: mergeUris.theirs };
const input1: InputData = { uri: mergeUris.ours };
const input2: InputData = { uri: mergeUris.theirs };
try {
const [head, mergeHead] = await Promise.all([repo.getCommit('HEAD'), repo.getCommit('MERGE_HEAD')]);
@ -1516,6 +1516,14 @@ export class CommandCenter {
opts.signoff = true;
}
if (config.get<boolean>('useEditorAsCommitInput')) {
opts.useEditor = true;
if (config.get<boolean>('verboseCommit')) {
opts.verbose = true;
}
}
const smartCommitChanges = config.get<'all' | 'tracked'>('smartCommitChanges');
if (
@ -1561,9 +1569,9 @@ export class CommandCenter {
}
}
let message = await getCommitMessage();
const message = await getCommitMessage();
if (!message && !opts.amend) {
if (!message && !opts.amend && !opts.useEditor) {
return false;
}
@ -1623,11 +1631,14 @@ export class CommandCenter {
private async commitWithAnyInput(repository: Repository, opts?: CommitOptions): Promise<void> {
const message = repository.inputBox.value;
const root = Uri.file(repository.root);
const config = workspace.getConfiguration('git', root);
const getCommitMessage = async () => {
let _message: string | undefined = message;
if (!_message) {
let value: string | undefined = undefined;
if (!_message && !config.get<boolean>('useEditorAsCommitInput')) {
const value: string | undefined = undefined;
if (opts && opts.amend && repository.HEAD && repository.HEAD.commit) {
return undefined;
@ -3010,7 +3021,7 @@ export class CommandCenter {
};
let message: string;
let type: 'error' | 'warning' = 'error';
let type: 'error' | 'warning' | 'information' = 'error';
const choices = new Map<string, () => void>();
const openOutputChannelChoice = localize('open git log', "Open Git Log");
@ -3073,6 +3084,12 @@ export class CommandCenter {
message = localize('missing user info', "Make sure you configure your 'user.name' and 'user.email' in git.");
choices.set(localize('learn more', "Learn More"), () => commands.executeCommand('vscode.open', Uri.parse('https://aka.ms/vscode-setup-git')));
break;
case GitErrorCodes.EmptyCommitMessage:
message = localize('empty commit', "Commit operation was cancelled due to empty commit message.");
choices.clear();
type = 'information';
options.modal = false;
break;
default: {
const hint = (err.stderr || err.message || String(err))
.replace(/^error: /mi, '')
@ -3094,17 +3111,25 @@ export class CommandCenter {
return;
}
let result: string | undefined;
const allChoices = Array.from(choices.keys());
const result = type === 'error'
? await window.showErrorMessage(message, options, ...allChoices)
: await window.showWarningMessage(message, options, ...allChoices);
switch (type) {
case 'error':
result = await window.showErrorMessage(message, options, ...allChoices);
break;
case 'warning':
result = await window.showWarningMessage(message, options, ...allChoices);
break;
case 'information':
result = await window.showInformationMessage(message, options, ...allChoices);
break;
}
if (result) {
const resultFn = choices.get(result);
if (resultFn) {
resultFn();
}
resultFn?.();
}
});
};

View file

@ -106,7 +106,7 @@ class GitDecorationProvider implements FileDecorationProvider {
}
private onDidRunGitStatus(): void {
let newDecorations = new Map<string, FileDecoration>();
const newDecorations = new Map<string, FileDecoration>();
this.collectSubmoduleDecorationData(newDecorations);
this.collectDecorationData(this.repository.indexGroup, newDecorations);

View file

@ -50,7 +50,7 @@ const JSCHARDET_TO_ICONV_ENCODINGS: { [name: string]: string } = {
};
export function detectEncoding(buffer: Buffer): string | null {
let result = detectEncodingByBOM(buffer);
const result = detectEncodingByBOM(buffer);
if (result) {
return result;

View file

@ -0,0 +1 @@
#!/bin/sh

View file

@ -0,0 +1,21 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { IPCClient } from './ipc/ipcClient';
function fatal(err: any): void {
console.error(err);
process.exit(1);
}
function main(argv: string[]): void {
const ipcClient = new IPCClient('git-editor');
const commitMessagePath = argv[argv.length - 1];
ipcClient.call({ commitMessagePath }).then(() => {
setTimeout(() => process.exit(0), 0);
}).catch(err => fatal(err));
}
main(process.argv);

View file

@ -0,0 +1,4 @@
#!/bin/sh
ELECTRON_RUN_AS_NODE="1" \
"$VSCODE_GIT_EDITOR_NODE" "$VSCODE_GIT_EDITOR_MAIN" $VSCODE_GIT_EDITOR_EXTRA_ARGS $@

View file

@ -406,7 +406,7 @@ export class Git {
}
async clone(url: string, options: ICloneOptions, cancellationToken?: CancellationToken): Promise<string> {
let baseFolderName = decodeURI(url).replace(/[\/]+$/, '').replace(/^.*[\/\\]/, '').replace(/\.git$/, '') || 'repository';
const baseFolderName = decodeURI(url).replace(/[\/]+$/, '').replace(/^.*[\/\\]/, '').replace(/\.git$/, '') || 'repository';
let folderName = baseFolderName;
let folderPath = path.join(options.parentPath, folderName);
let count = 1;
@ -447,7 +447,7 @@ export class Git {
};
try {
let command = ['clone', url.includes(' ') ? encodeURI(url) : url, folderPath, '--progress'];
const command = ['clone', url.includes(' ') ? encodeURI(url) : url, folderPath, '--progress'];
if (options.recursive) {
command.push('--recursive');
}
@ -481,7 +481,7 @@ export class Git {
const pathUri = Uri.file(repositoryPath);
if (repoUri.authority.length !== 0 && pathUri.authority.length === 0) {
// eslint-disable-next-line code-no-look-behind-regex
let match = /(?<=^\/?)([a-zA-Z])(?=:\/)/.exec(pathUri.path);
const match = /(?<=^\/?)([a-zA-Z])(?=:\/)/.exec(pathUri.path);
if (match !== null) {
const [, letter] = match;
@ -556,9 +556,7 @@ export class Git {
private async _exec(args: string[], options: SpawnOptions = {}): Promise<IExecutionResult<string>> {
const child = this.spawn(args, options);
if (options.onSpawn) {
options.onSpawn(child);
}
options.onSpawn?.(child);
if (options.input) {
child.stdin!.end(options.input, 'utf8');
@ -794,7 +792,7 @@ export function parseGitmodules(raw: string): Submodule[] {
const commitRegex = /([0-9a-f]{40})\n(.*)\n(.*)\n(.*)\n(.*)\n(.*)\n(.*)(?:\n([^]*?))?(?:\x00)/gm;
export function parseGitCommits(data: string): Commit[] {
let commits: Commit[] = [];
const commits: Commit[] = [];
let ref;
let authorName;
@ -1400,20 +1398,37 @@ export class Repository {
}
async commit(message: string | undefined, opts: CommitOptions = Object.create(null)): Promise<void> {
const args = ['commit', '--quiet', '--allow-empty-message'];
const args = ['commit', '--quiet'];
const options: SpawnOptions = {};
if (message) {
options.input = message;
args.push('--file', '-');
}
if (opts.verbose) {
args.push('--verbose');
}
if (opts.all) {
args.push('--all');
}
if (opts.amend && message) {
if (opts.amend) {
args.push('--amend');
}
if (opts.amend && !message) {
args.push('--amend', '--no-edit');
} else {
args.push('--file', '-');
if (!opts.useEditor) {
if (!message) {
if (opts.amend) {
args.push('--no-edit');
} else {
options.input = '';
args.push('--file', '-');
}
}
args.push('--allow-empty-message');
}
if (opts.signoff) {
@ -1438,7 +1453,7 @@ export class Repository {
}
try {
await this.exec(args, !opts.amend || message ? { input: message || '' } : {});
await this.exec(args, options);
} catch (commitErr) {
await this.handleCommitError(commitErr);
}
@ -1462,6 +1477,9 @@ export class Repository {
if (/not possible because you have unmerged files/.test(commitErr.stderr || '')) {
commitErr.gitErrorCode = GitErrorCodes.UnmergedChanges;
throw commitErr;
} else if (/Aborting commit due to empty commit message/.test(commitErr.stderr || '')) {
commitErr.gitErrorCode = GitErrorCodes.EmptyCommitMessage;
throw commitErr;
}
try {
@ -1543,7 +1561,7 @@ export class Repository {
}
async deleteTag(name: string): Promise<void> {
let args = ['tag', '-d', name];
const args = ['tag', '-d', name];
await this.exec(args);
}
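
For reference, the rewritten argument construction above produces invocations along these lines (derived from the code in this hunk; an empty message on the editor path is what surfaces the EmptyCommitMessage error handled in handleCommitError above):

// Representative `git commit` invocations produced by the logic above (sketch):
//
//   message from the input box, no other flags:
//     git commit --quiet --file - --allow-empty-message      (message piped via stdin)
//
//   amend with no new message, editor not used:
//     git commit --quiet --amend --no-edit --allow-empty-message
//
//   git.useEditorAsCommitInput (+ git.verboseCommit) enabled, no message:
//     git commit --quiet --verbose
//     (git opens GIT_EDITOR; aborting with an empty message is mapped to
//      GitErrorCodes.EmptyCommitMessage above)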

View file

@ -0,0 +1,65 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as path from 'path';
import { TabInputText, Uri, window, workspace } from 'vscode';
import { IIPCHandler, IIPCServer } from './ipc/ipcServer';
import { EmptyDisposable, IDisposable } from './util';
interface GitEditorRequest {
commitMessagePath?: string;
}
export class GitEditor implements IIPCHandler {
private disposable: IDisposable = EmptyDisposable;
constructor(private ipc?: IIPCServer) {
if (ipc) {
this.disposable = ipc.registerHandler('git-editor', this);
}
}
async handle({ commitMessagePath }: GitEditorRequest): Promise<any> {
if (commitMessagePath) {
const uri = Uri.file(commitMessagePath);
const doc = await workspace.openTextDocument(uri);
await window.showTextDocument(doc, { preview: false });
return new Promise((c) => {
const onDidClose = window.tabGroups.onDidChangeTabs(async (tabs) => {
if (tabs.closed.some(t => t.input instanceof TabInputText && t.input.uri.toString() === uri.toString())) {
onDidClose.dispose();
return c(true);
}
});
});
}
}
getEnv(): { [key: string]: string } {
if (!this.ipc) {
return {
GIT_EDITOR: `"${path.join(__dirname, 'git-editor-empty.sh')}"`
};
}
const env: { [key: string]: string } = {
VSCODE_GIT_EDITOR_NODE: process.execPath,
VSCODE_GIT_EDITOR_EXTRA_ARGS: (process.versions['electron'] && process.versions['microsoft-build']) ? '--ms-enable-electron-run-as-node' : '',
VSCODE_GIT_EDITOR_MAIN: path.join(__dirname, 'git-editor-main.js')
};
const config = workspace.getConfiguration('git');
if (config.get<boolean>('useEditorAsCommitInput')) {
env.GIT_EDITOR = `"${path.join(__dirname, 'git-editor.sh')}"`;
}
return env;
}
dispose(): void {
this.disposable.dispose();
}
}
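
How the pieces above fit together at commit time, summarized as a sketch. Step 2 relies on standard git behaviour (git passes the path of its commit message file to the configured editor); everything else follows from this diff.

// 1. Repository.commit() runs `git commit --quiet [--verbose]` with the merged
//    environment assembled in extension.ts, so when git.useEditorAsCommitInput
//    is enabled GIT_EDITOR points at git-editor.sh.
// 2. git writes the commit message file and invokes GIT_EDITOR with its path.
// 3. git-editor.sh re-runs the extension-host node binary on git-editor-main.js,
//    which sends { commitMessagePath } over the 'git-editor' IPC channel.
// 4. GitEditor.handle() opens that file in a text editor tab and resolves once
//    the tab is closed; the helper process exits and git reads the message.
// Without an IPC server, getEnv() points GIT_EDITOR at git-editor-empty.sh,
// which simply exits so git never blocks waiting for an interactive editor.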

View file

@ -25,6 +25,8 @@ import { GitTimelineProvider } from './timelineProvider';
import { registerAPICommands } from './api/api1';
import { TerminalEnvironmentManager } from './terminal';
import { OutputChannelLogger } from './log';
import { createIPCServer, IIPCServer } from './ipc/ipcServer';
import { GitEditor } from './gitEditor';
const deactivateTasks: { (): Promise<any> }[] = [];
@ -60,10 +62,21 @@ async function createModel(context: ExtensionContext, outputChannelLogger: Outpu
return !skip;
});
const askpass = await Askpass.create(outputChannelLogger, context.storagePath);
let ipc: IIPCServer | undefined = undefined;
try {
ipc = await createIPCServer(context.storagePath);
} catch (err) {
outputChannelLogger.logError(`Failed to create git IPC: ${err}`);
}
const askpass = new Askpass(ipc);
disposables.push(askpass);
const environment = askpass.getEnv();
const gitEditor = new GitEditor(ipc);
disposables.push(gitEditor);
const environment = { ...askpass.getEnv(), ...gitEditor.getEnv() };
const terminalEnvironmentManager = new TerminalEnvironmentManager(context, environment);
disposables.push(terminalEnvironmentManager);

View file

@ -454,6 +454,13 @@ class ProgressManager {
const onDidChange = filterEvent(workspace.onDidChangeConfiguration, e => e.affectsConfiguration('git', Uri.file(this.repository.root)));
onDidChange(_ => this.updateEnablement());
this.updateEnablement();
this.repository.onDidChangeOperations(() => {
const commitInProgress = this.repository.operations.isRunning(Operation.Commit);
this.repository.sourceControl.inputBox.enabled = !commitInProgress;
commands.executeCommand('setContext', 'commitInProgress', commitInProgress);
});
}
private updateEnablement(): void {
@ -1029,7 +1036,7 @@ export class Repository implements Disposable {
}
let lineNumber = 0;
let start = 0, end;
let start = 0;
let match: RegExpExecArray | null;
const regex = /\r?\n/g;
@ -1038,7 +1045,7 @@ export class Repository implements Disposable {
lineNumber++;
}
end = match ? match.index : text.length;
const end = match ? match.index : text.length;
const line = text.substring(start, end);

View file

@ -9,7 +9,7 @@ export function applyLineChanges(original: TextDocument, modified: TextDocument,
const result: string[] = [];
let currentLine = 0;
for (let diff of diffs) {
for (const diff of diffs) {
const isInsertion = diff.originalEndLineNumber === 0;
const isDeletion = diff.modifiedEndLineNumber === 0;

View file

@ -89,7 +89,7 @@ export function eventToPromise<T>(event: Event<T>): Promise<T> {
}
export function once(fn: (...args: any[]) => any): (...args: any[]) => any {
let didRun = false;
const didRun = false;
return (...args) => {
if (didRun) {
@ -219,11 +219,11 @@ export async function grep(filename: string, pattern: RegExp): Promise<boolean>
export function readBytes(stream: Readable, bytes: number): Promise<Buffer> {
return new Promise<Buffer>((complete, error) => {
let done = false;
let buffer = Buffer.allocUnsafe(bytes);
const buffer = Buffer.allocUnsafe(bytes);
let bytesRead = 0;
stream.on('data', (data: Buffer) => {
let bytesToRead = Math.min(bytes - bytesRead, data.length);
const bytesToRead = Math.min(bytes - bytesRead, data.length);
data.copy(buffer, bytesRead, 0, bytesToRead);
bytesRead += bytesToRead;

View file

@ -12,6 +12,7 @@
"../../src/vscode-dts/vscode.d.ts",
"../../src/vscode-dts/vscode.proposed.diffCommand.d.ts",
"../../src/vscode-dts/vscode.proposed.scmActionButton.d.ts",
"../../src/vscode-dts/vscode.proposed.scmInput.d.ts",
"../../src/vscode-dts/vscode.proposed.scmSelectedProvider.d.ts",
"../../src/vscode-dts/vscode.proposed.scmValidation.d.ts",
"../../src/vscode-dts/vscode.proposed.tabs.d.ts",

View file

@ -51,7 +51,7 @@ export function promiseFromEvent<T, U>(
event: Event<T>,
adapter: PromiseAdapter<T, U> = passthrough): { promise: Promise<U>; cancel: EventEmitter<void> } {
let subscription: Disposable;
let cancel = new EventEmitter<void>();
const cancel = new EventEmitter<void>();
return {
promise: new Promise<U>((resolve, reject) => {
cancel.event(_ => reject('Cancelled'));

View file

@ -107,7 +107,7 @@ export class GithubRemoteSourceProvider implements RemoteSourceProvider {
let page = 1;
while (true) {
let res = await octokit.repos.listBranches({ ...repository, per_page: 100, page });
const res = await octokit.repos.listBranches({ ...repository, per_page: 100, page });
if (res.data.length === 0) {
break;

View file

@ -33,7 +33,7 @@ function exec(command: string, options: cp.ExecOptions): Promise<{ stdout: strin
const buildNames: string[] = ['build', 'compile', 'watch'];
function isBuildTask(name: string): boolean {
for (let buildName of buildNames) {
for (const buildName of buildNames) {
if (name.indexOf(buildName) !== -1) {
return true;
}
@ -43,7 +43,7 @@ function isBuildTask(name: string): boolean {
const testNames: string[] = ['test'];
function isTestTask(name: string): boolean {
for (let testName of testNames) {
for (const testName of testNames) {
if (name.indexOf(testName) !== -1) {
return true;
}
@ -73,7 +73,7 @@ interface GruntTaskDefinition extends vscode.TaskDefinition {
async function findGruntCommand(rootPath: string): Promise<string> {
let command: string;
let platform = process.platform;
const platform = process.platform;
if (platform === 'win32' && await exists(path.join(rootPath!, 'node_modules', '.bin', 'grunt.cmd'))) {
command = path.join('.', 'node_modules', '.bin', 'grunt.cmd');
} else if ((platform === 'linux' || platform === 'darwin') && await exists(path.join(rootPath!, 'node_modules', '.bin', 'grunt'))) {
@ -103,7 +103,7 @@ class FolderDetector {
}
public start(): void {
let pattern = path.join(this._workspaceFolder.uri.fsPath, '{node_modules,[Gg]runtfile.js}');
const pattern = path.join(this._workspaceFolder.uri.fsPath, '{node_modules,[Gg]runtfile.js}');
this.fileWatcher = vscode.workspace.createFileSystemWatcher(pattern);
this.fileWatcher.onDidChange(() => this.promise = undefined);
this.fileWatcher.onDidCreate(() => this.promise = undefined);
@ -125,9 +125,9 @@ class FolderDetector {
const taskDefinition = <any>_task.definition;
const gruntTask = taskDefinition.task;
if (gruntTask) {
let options: vscode.ShellExecutionOptions = { cwd: this.workspaceFolder.uri.fsPath };
let source = 'grunt';
let task = gruntTask.indexOf(' ') === -1
const options: vscode.ShellExecutionOptions = { cwd: this.workspaceFolder.uri.fsPath };
const source = 'grunt';
const task = gruntTask.indexOf(' ') === -1
? new vscode.Task(taskDefinition, this.workspaceFolder, gruntTask, source, new vscode.ShellExecution(`${await this._gruntCommand}`, [gruntTask, ...taskDefinition.args], options))
: new vscode.Task(taskDefinition, this.workspaceFolder, gruntTask, source, new vscode.ShellExecution(`${await this._gruntCommand}`, [`"${gruntTask}"`, ...taskDefinition.args], options));
return task;
@ -136,8 +136,8 @@ class FolderDetector {
}
private async computeTasks(): Promise<vscode.Task[]> {
let rootPath = this._workspaceFolder.uri.scheme === 'file' ? this._workspaceFolder.uri.fsPath : undefined;
let emptyTasks: vscode.Task[] = [];
const rootPath = this._workspaceFolder.uri.scheme === 'file' ? this._workspaceFolder.uri.fsPath : undefined;
const emptyTasks: vscode.Task[] = [];
if (!rootPath) {
return emptyTasks;
}
@ -145,14 +145,14 @@ class FolderDetector {
return emptyTasks;
}
let commandLine = `${await this._gruntCommand} --help --no-color`;
const commandLine = `${await this._gruntCommand} --help --no-color`;
try {
let { stdout, stderr } = await exec(commandLine, { cwd: rootPath });
const { stdout, stderr } = await exec(commandLine, { cwd: rootPath });
if (stderr) {
getOutputChannel().appendLine(stderr);
showError();
}
let result: vscode.Task[] = [];
const result: vscode.Task[] = [];
if (stdout) {
// grunt lists tasks as follows (description is wrapped into a new line if too long):
// ...
@ -166,10 +166,10 @@ class FolderDetector {
//
// Tasks run in the order specified
let lines = stdout.split(/\r{0,1}\n/);
const lines = stdout.split(/\r{0,1}\n/);
let tasksStart = false;
let tasksEnd = false;
for (let line of lines) {
for (const line of lines) {
if (line.length === 0) {
continue;
}
@ -181,21 +181,21 @@ class FolderDetector {
if (line.indexOf('Tasks run in the order specified') === 0) {
tasksEnd = true;
} else {
let regExp = /^\s*(\S.*\S) \S/g;
let matches = regExp.exec(line);
const regExp = /^\s*(\S.*\S) \S/g;
const matches = regExp.exec(line);
if (matches && matches.length === 2) {
let name = matches[1];
let kind: GruntTaskDefinition = {
const name = matches[1];
const kind: GruntTaskDefinition = {
type: 'grunt',
task: name
};
let source = 'grunt';
let options: vscode.ShellExecutionOptions = { cwd: this.workspaceFolder.uri.fsPath };
let task = name.indexOf(' ') === -1
const source = 'grunt';
const options: vscode.ShellExecutionOptions = { cwd: this.workspaceFolder.uri.fsPath };
const task = name.indexOf(' ') === -1
? new vscode.Task(kind, this.workspaceFolder, name, source, new vscode.ShellExecution(`${await this._gruntCommand} ${name}`, options))
: new vscode.Task(kind, this.workspaceFolder, name, source, new vscode.ShellExecution(`${await this._gruntCommand} "${name}"`, options));
result.push(task);
let lowerCaseTaskName = name.toLowerCase();
const lowerCaseTaskName = name.toLowerCase();
if (isBuildTask(lowerCaseTaskName)) {
task.group = vscode.TaskGroup.Build;
} else if (isTestTask(lowerCaseTaskName)) {
@ -208,7 +208,7 @@ class FolderDetector {
}
return result;
} catch (err) {
let channel = getOutputChannel();
const channel = getOutputChannel();
if (err.stderr) {
channel.appendLine(err.stderr);
}
@ -238,7 +238,7 @@ class TaskDetector {
}
public start(): void {
let folders = vscode.workspace.workspaceFolders;
const folders = vscode.workspace.workspaceFolders;
if (folders) {
this.updateWorkspaceFolders(folders, []);
}
@ -255,15 +255,15 @@ class TaskDetector {
}
private updateWorkspaceFolders(added: readonly vscode.WorkspaceFolder[], removed: readonly vscode.WorkspaceFolder[]): void {
for (let remove of removed) {
let detector = this.detectors.get(remove.uri.toString());
for (const remove of removed) {
const detector = this.detectors.get(remove.uri.toString());
if (detector) {
detector.dispose();
this.detectors.delete(remove.uri.toString());
}
}
for (let add of added) {
let detector = new FolderDetector(add, findGruntCommand(add.uri.fsPath));
for (const add of added) {
const detector = new FolderDetector(add, findGruntCommand(add.uri.fsPath));
this.detectors.set(add.uri.toString(), detector);
if (detector.isEnabled()) {
detector.start();
@ -273,15 +273,15 @@ class TaskDetector {
}
private updateConfiguration(): void {
for (let detector of this.detectors.values()) {
for (const detector of this.detectors.values()) {
detector.dispose();
this.detectors.delete(detector.workspaceFolder.uri.toString());
}
let folders = vscode.workspace.workspaceFolders;
const folders = vscode.workspace.workspaceFolders;
if (folders) {
for (let folder of folders) {
for (const folder of folders) {
if (!this.detectors.has(folder.uri.toString())) {
let detector = new FolderDetector(folder, findGruntCommand(folder.uri.fsPath));
const detector = new FolderDetector(folder, findGruntCommand(folder.uri.fsPath));
this.detectors.set(folder.uri.toString(), detector);
if (detector.isEnabled()) {
detector.start();
@ -320,13 +320,13 @@ class TaskDetector {
} else if (this.detectors.size === 1) {
return this.detectors.values().next().value.getTasks();
} else {
let promises: Promise<vscode.Task[]>[] = [];
for (let detector of this.detectors.values()) {
const promises: Promise<vscode.Task[]>[] = [];
for (const detector of this.detectors.values()) {
promises.push(detector.getTasks().then((value) => value, () => []));
}
return Promise.all(promises).then((values) => {
let result: vscode.Task[] = [];
for (let tasks of values) {
const result: vscode.Task[] = [];
for (const tasks of values) {
if (tasks && tasks.length > 0) {
result.push(...tasks);
}

Some files were not shown because too many files have changed in this diff.