Merge branch 'master' into feature/php-commenting

Rob Lourens 2017-09-03 21:28:19 -07:00 committed by GitHub
commit ddb6d23841
8845 changed files with 327296 additions and 268453 deletions

.github/classifier.yml

@@ -0,0 +1,45 @@
{
perform: true,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [ feature-request ],
autoAssignees: {
accessibility: [],
api: [],
css-less-sass: [],
debug: [ isidorn ],
editor: [ alexandrudima ],
editor-brackets: [],
editor-clipboard: [ rebornix ],
editor-colors: [],
editor-contrib: [ rebornix ],
editor-core: [],
editor-find-widget: [ rebornix ],
editor-folding: [],
editor-ime: [ rebornix ],
editor-indentation: [ rebornix ],
editor-input: [],
editor-minimap: [],
editor-multicursor: [],
editor-wrapping: [],
emmet: [ ramya-rao-a ],
error-list: [],
extensions: [],
git: [ joaomoreno ],
hot-exit: [ Tyriar ],
html: [],
i18n: [],
install-update: [],
integrated-terminal: [ Tyriar ],
javascript: [ mjbvz ],
json: [],
languages basic: [],
markdown: [],
perf-profile: [],
php: [ roblourens ],
search: [ roblourens ],
snippets: [ jrieken ],
tasks: [],
typescript: [ mjbvz ],
workbench: [],
}
}

.github/copycat.yml

@@ -0,0 +1,5 @@
{
perform: true,
target_owner: 'chrmarti',
target_repo: 'testissues'
}

.github/insiders.yml

@@ -0,0 +1,4 @@
{
insidersLabel: 'insiders',
perform: true
}

.github/issue_template.md

@@ -0,0 +1,13 @@
<!-- Do you have a question? Please ask it on http://stackoverflow.com/questions/tagged/vscode. -->
<!-- Use Help > Report Issues to prefill these. -->
- VSCode Version:
- OS Version:
Steps to Reproduce:
1.
2.
<!-- Launch with `code --disable-extensions` to check. -->
Reproduces without extensions: Yes/No

.github/needs_more_info.yml

@@ -0,0 +1,6 @@
{
daysUntilClose: 7,
needsMoreInfoLabel: 'needs more info',
perform: true,
closeComment: 'This issue has been closed automatically because it needs more information and has not had recent activity. Please refer to our [guidelines](https://github.com/Microsoft/vscode/blob/master/CONTRIBUTING.md) for filing issues. Thank you for your contributions.'
}

.github/new_release.yml

@@ -0,0 +1,5 @@
{
newReleaseLabel: 'new release',
newReleases: ['1.15'],
perform: true
}

.travis.yml

@@ -19,13 +19,16 @@ addons:
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev
before_install:
- git submodule update --init --recursive
- git clone https://github.com/creationix/nvm.git ./.nvm
- git clone --depth 1 https://github.com/creationix/nvm.git ./.nvm
- source ./.nvm/nvm.sh
- nvm install 6.6.0
- nvm use 6.6.0
- nvm install 7.4.0
- nvm use 7.4.0
- npm config set python `which python`
- npm install -g gulp
- if [ $TRAVIS_OS_NAME == "linux" ]; then
@@ -42,8 +45,8 @@ script:
- gulp electron --silent
- gulp compile --silent --max_old_space_size=4096
- gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --reporter dot --coverage; else ./scripts/test.sh --reporter dot; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
- ./scripts/test-integration.sh
after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi

.vscode/launch.json

@@ -1,26 +1,7 @@
{
"version": "0.1.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Unit Tests",
"program": "${workspaceRoot}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${execPath}",
"stopOnEntry": false,
"args": [
"--timeout",
"999999"
],
"cwd": "${workspaceRoot}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
},
{
"type": "node",
"request": "launch",
@@ -36,8 +17,9 @@
"type": "node",
"request": "attach",
"name": "Attach to Extension Host",
"protocol": "inspector",
"port": 5870,
"sourceMaps": true,
"restart": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
@@ -46,8 +28,18 @@
"type": "node",
"request": "attach",
"name": "Attach to Shared Process",
"protocol": "inspector",
"port": 5871,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
},
{
"type": "node",
"request": "attach",
"protocol": "inspector",
"name": "Attach to Search process",
"port": 7890,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
@@ -56,8 +48,32 @@
"type": "node",
"request": "attach",
"name": "Attach to CLI Process",
"protocol": "inspector",
"port": 5874,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
},
{
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"protocol": "inspector",
"port": 5875,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code Emmet Tests",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceRoot}/extensions/emmet/test-fixtures",
"--extensionDevelopmentPath=${workspaceRoot}/extensions/emmet",
"--extensionTestsPath=${workspaceRoot}/extensions/emmet/out/test"
],
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
@@ -72,7 +88,6 @@
"--extensionDevelopmentPath=${workspaceRoot}/extensions/vscode-api-tests",
"--extensionTestsPath=${workspaceRoot}/extensions/vscode-api-tests/out"
],
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
@@ -87,7 +102,6 @@
"--extensionDevelopmentPath=${workspaceRoot}/extensions/vscode-colorize-tests",
"--extensionTestsPath=${workspaceRoot}/extensions/vscode-colorize-tests/out"
],
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
@@ -96,8 +110,7 @@
"type": "chrome",
"request": "attach",
"name": "Attach to VS Code",
"port": 9222,
"sourceMaps": true
"port": 9222
},
{
"type": "chrome",
@@ -112,18 +125,58 @@
"linux": {
"runtimeExecutable": "${workspaceRoot}/scripts/code.sh"
},
"sourceMaps": true
"urlFilter": "*index.html*",
"runtimeArgs": [
"--inspect=5875"
],
"webRoot": "${workspaceRoot}"
},
{
"type": "node",
"request": "launch",
"name": "Debug Monaco",
"program": "${workspaceRoot}/build/lib/monaco.js",
"cwd": "${workspaceRoot}/build/lib"
// ,
// "port": 5870,
// "sourceMaps": true,
// "outFiles": [ "${workspaceRoot}/out/**/*.js" ]
"name": "Git Unit Tests",
"protocol": "inspector",
"program": "${workspaceRoot}/extensions/git/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceRoot}/extensions/git",
"outFiles": [
"${workspaceRoot}/extensions/git/out/**/*.js"
]
},
{
"type": "node",
"request": "launch",
"name": "Unit Tests",
"protocol": "inspector",
"program": "${workspaceRoot}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${workspaceRoot}/.build/electron/Code - OSS.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceRoot}/.build/electron/Code - OSS.exe"
},
"linux": {
"runtimeExecutable": "${workspaceRoot}/.build/electron/code-oss"
},
"stopOnEntry": false,
"args": [
"--timeout",
"2000"
],
"cwd": "${workspaceRoot}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"outFiles": [
"${workspaceRoot}/out/**/*.js"
]
}
],
"compounds": [
{
"name": "Debug VS Code Main and Renderer",
"configurations": [
"Launch VS Code",
"Attach to Main Process"
]
}
]
}

.vscode/settings.json

@@ -6,7 +6,9 @@
".git": true,
".build": true,
"**/.DS_Store": true,
"build/**/*.js": { "when": "$(basename).ts" }
"build/**/*.js": {
"when": "$(basename).ts"
}
},
"search.exclude": {
"**/node_modules": true,
@@ -17,7 +19,6 @@
"extensions/**/out/**": true
},
"tslint.enable": true,
"tslint.rulesDirectory": "build/lib/tslint",
"lcov.path": [
"./.build/coverage/lcov.info",
"./.build/coverage-single/lcov.info"
@@ -31,4 +32,4 @@
}
}
]
}
}

.vscode/tasks.json

@@ -1,23 +1,15 @@
{
"version": "0.1.0",
"windows": {
"command": ".\\node_modules\\.bin\\gulp"
},
"osx": {
"command": "./node_modules/.bin/gulp"
},
"linux": {
"command": "./node_modules/.bin/gulp"
},
"isShellCommand": true,
"version": "2.0.0",
"tasks": [
{
"taskName": "watch",
"args": [
"--no-color"
],
"isBuildCommand": true,
"isWatching": true,
"type": "npm",
"script": "watch",
"label": "Build VS Code",
"group": "build",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"owner": "typescript",
"applyTo": "closedDocuments",
@@ -30,38 +22,51 @@
"location": 2,
"message": 3
},
"watching": {
"background": {
"beginsPattern": "Starting compilation",
"endsPattern": "Finished compilation"
}
}
},
{
"taskName": "tslint",
"args": [],
"problemMatcher": {
"owner": "tslint",
"fileLocation": [
"relative",
"${workspaceRoot}"
],
"severity": "warning",
"pattern": {
"regexp": "(.*)\\[(\\d+),\\s(\\d+)\\]:\\s(.*)$", // (.*)\[(\d+), (\d+)\]: (.*)
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
"type": "gulp",
"task": "tslint",
"label": "Run tslint",
"problemMatcher": [
"$tslint4"
]
},
{
"taskName": "Run tests",
"type": "shell",
"command": "./scripts/test.sh",
"windows": {
"command": ".\\scripts\\test.bat"
},
"group": "test",
"presentation": {
"echo": true,
"reveal": "always"
}
},
{
"taskName": "test",
"args": [
"--no-color"
],
"showOutput": "always",
"isTestCommand": true
"taskName": "Run Dev",
"type": "shell",
"command": "./scripts/code.sh",
"windows": {
"command": ".\\scripts\\code.bat"
},
"problemMatcher": []
},
{
"type": "gulp",
"task": "electron",
"label": "Download electron"
},
{
"type": "gulp",
"task": "hygiene",
"problemMatcher": []
}
]
}
}

CODE_OF_CONDUCT.md

@@ -0,0 +1 @@
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

OSSREADME.json

@@ -1,12 +1,6 @@
// Listing in here platform dependencies that come in at build time
[{
"name": "atom-brightray",
"repositoryURL": "https://github.com/electron/brightray",
"license": "MIT",
"isProd": true
},
{
"isLicense": true,
"name": "async-each",
"repositoryURL": "https://github.com/paulmillr/async-each",
@@ -68,7 +62,7 @@
},
{
"name": "chromium",
"version": "53.0.2785.143",
"version": "58.0.3029.110",
"repositoryURL": "http://www.chromium.org/Home",
"licenseDetail": [
"BSD License",
@@ -104,20 +98,20 @@
},
{
"name": "libchromiumcontent",
"version": "53.0.2785.143",
"version": "58.0.3029.110",
"license": "MIT",
"repositoryURL": "https://github.com/electron/libchromiumcontent",
"isProd": true
},
{
"name": "nodejs",
"version": "6.5.0",
"version": "7.9.0",
"repositoryURL": "https://github.com/nodejs/node",
"isProd": true
},
{
"name": "electron",
"version": "1.4.6",
"version": "1.7.3",
"license": "MIT",
"repositoryURL": "https://github.com/electron/electron",
"isProd": true
@@ -127,6 +121,655 @@
"version": "5.5.6",
"repositoryURL": "https://github.com/jrsoftware/issrc",
"isProd": true
},
{
"isLicense": true,
"name": "ripgrep",
"repositoryURL": "https://github.com/BurntSushi/ripgrep",
"license": "MIT",
"licenseDetail": [
"The MIT License (MIT)",
"",
"Copyright (c) 2015 Andrew Gallant",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy of",
"this software and associated documentation files (the \"Software\"), to deal in",
"the Software without restriction, including without limitation the rights to",
"use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies",
"of the Software, and to permit persons to whom the Software is furnished to do",
"so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
],
"isProd": true
},
{
"isLicense": true,
"name": "jschardet",
"repositoryURL": "https://github.com/aadsm/jschardet",
"license": "LGPL",
"licenseDetail": [
"Chardet was originally ported from C++ by Mark Pilgrim. It is now maintained",
" by Dan Blanchard and Ian Cordasco, and was formerly maintained by Erik Rose.",
" JSChardet was ported from python to JavaScript by António Afonso ",
" (https://github.com/aadsm/jschardet) and transformed into an npm package by ",
"Markus Ast (https://github.com/brainafk)",
"",
"GNU LESSER GENERAL PUBLIC LICENSE",
"\t\t Version 2.1, February 1999",
"",
" Copyright (C) 1991,",
"1999 Free Software Foundation, Inc.",
" 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA",
" Everyone is permitted to copy and distribute verbatim copies",
" of this license document, but changing it is not allowed.",
"",
"[This is the first released version of the Lesser GPL. It also counts",
" as the successor of the GNU Library Public License, version 2, hence",
" the version number 2.1.",
"]",
"",
"\t\t\t Preamble",
"",
" The licenses for most software are designed to take away your",
"freedom to share and change it. By contrast, the GNU General Public",
"Licenses are intended to guarantee your freedom to share and change",
"free software--to make sure the software is free for all its users.",
"",
" This license, the Lesser General Public License, applies to some",
"specially designated software packages--typically libraries--of the",
"Free Software Foundation and other authors who decide to use it. You",
"can use it too, but we suggest you first think carefully about whether",
"this license or the ordinary General Public License is the better",
"strategy to use in any particular case, based on the explanations below.",
"",
" When we speak of free software, we are referring to freedom of use,",
"not price. Our General Public Licenses are designed to make sure that",
"you have the freedom to distribute copies of free software (and charge",
"for this service if you wish); that you receive source code or can get",
"it if you want it; that you can change the software and use pieces of",
"it in new free programs; and that you are informed that you can do",
"these things.",
"",
" To protect your rights, we need to make restrictions that forbid",
"distributors to deny you these rights or to ask you to surrender these",
"rights. These restrictions translate to certain responsibilities for",
"you if you distribute copies of the library or if you modify it.",
"",
" For example, if you distribute copies of the library, whether gratis",
"or for a fee, you must give the recipients all the rights that we gave",
"you. You must make sure that they, too, receive or can get the source",
"code. If you link other code with the library, you must provide",
"complete object files to the recipients, so that they can relink them",
"with the library after making changes to the library and recompiling",
"it. And you must show them these terms so they know their rights.",
"",
" We protect your rights with a two-step method: (1) we copyright the",
"library, and (2) we offer you this license, which gives you legal",
"permission to copy, distribute and/or modify the library.",
"",
" To protect each distributor, we want to make it very clear that",
"there is no warranty for the free library. Also, if the library is",
"modified by someone else and passed on, the recipients should know",
"that what they have is not the original version, so that the original",
"author's reputation will not be affected by problems that might be",
"introduced by others.",
"",
" Finally, software patents pose a constant threat to the existence of",
"any free program. We wish to make sure that a company cannot",
"effectively restrict the users of a free program by obtaining a",
"restrictive license from a patent holder. Therefore, we insist that",
"any patent license obtained for a version of the library must be",
"consistent with the full freedom of use specified in this license.",
"",
" Most GNU software, including some libraries, is covered by the",
"ordinary GNU General Public License. This license, the GNU Lesser",
"General Public License, applies to certain designated libraries, and",
"is quite different from the ordinary General Public License. We use",
"this license for certain libraries in order to permit linking those",
"libraries into non-free programs.",
"",
" When a program is linked with a library, whether statically or using",
"a shared library, the combination of the two is legally speaking a",
"combined work, a derivative of the original library. The ordinary",
"General Public License therefore permits such linking only if the",
"entire combination fits its criteria of freedom. The Lesser General",
"Public License permits more lax criteria for linking other code with",
"the library.",
"",
" We call this license the \"Lesser\" General Public License because it",
"does Less to protect the user's freedom than the ordinary General",
"Public License. It also provides other free software developers Less",
"of an advantage over competing non-free programs. These disadvantages",
"are the reason we use the ordinary General Public License for many",
"libraries. However, the Lesser license provides advantages in certain",
"special circumstances.",
"",
" For example, on rare occasions, there may be a special need to",
"encourage the widest possible use of a certain library, so that it becomes",
"a de-facto standard. To achieve this, non-free programs must be",
"allowed to use the library. A more frequent case is that a free",
"library does the same job as widely used non-free libraries. In this",
"case, there is little to gain by limiting the free library to free",
"software only, so we use the Lesser General Public License.",
"",
" In other cases, permission to use a particular library in non-free",
"programs enables a greater number of people to use a large body of",
"free software. For example, permission to use the GNU C Library in",
"non-free programs enables many more people to use the whole GNU",
"operating system, as well as its variant, the GNU/Linux operating",
"system.",
"",
" Although the Lesser General Public License is Less protective of the",
"users' freedom, it does ensure that the user of a program that is",
"linked with the Library has the freedom and the wherewithal to run",
"that program using a modified version of the Library.",
"",
" The precise terms and conditions for copying, distribution and",
"modification follow. Pay close attention to the difference between a",
"\"work based on the library\" and a \"work that uses the library\". The",
"former contains code derived from the library, whereas the latter must",
"be combined with the library in order to run.",
"",
"\t\t GNU LESSER GENERAL PUBLIC LICENSE",
" TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION",
"",
" 0. This License Agreement applies to any software library or other",
"program which contains a notice placed by the copyright holder or",
"other authorized party saying it may be distributed under the terms of",
"this Lesser General Public License (also called \"this License\").",
"Each licensee is addressed as \"you\".",
"",
" A \"library\" means a collection of software functions and/or data",
"prepared so as to be conveniently linked with application programs",
"(which use some of those functions and data) to form executables.",
"",
" The \"Library\", below, refers to any such software library or work",
"which has been distributed under these terms. A \"work based on the",
"Library\" means either the Library or any derivative work under",
"copyright law: that is to say, a work containing the Library or a",
"portion of it, either verbatim or with modifications and/or translated",
"straightforwardly into another language. (Hereinafter, translation is",
"included without limitation in the term \"modification\".)",
"",
" \"Source code\" for a work means the preferred form of the work for",
"making modifications to it. For a library, complete source code means",
"all the source code for all modules it contains, plus any associated",
"interface definition files, plus the scripts used to control compilation",
"and installation of the library.",
"",
" Activities other than copying, distribution and modification are not",
"covered by this License; they are outside its scope. The act of",
"running a program using the Library is not restricted, and output from",
"such a program is covered only if its contents constitute a work based",
"on the Library (independent of the use of the Library in a tool for",
"writing it). Whether that is true depends on what the Library does",
"and what the program that uses the Library does.",
"",
" 1. You may copy and distribute verbatim copies of the Library's",
"complete source code as you receive it, in any medium, provided that",
"you conspicuously and appropriately publish on each copy an",
"appropriate copyright notice and disclaimer of warranty; keep intact",
"all the notices that refer to this License and to the absence of any",
"warranty; and distribute a copy of this License along with the",
"Library.",
"",
" You may charge a fee for the physical act of transferring a copy,",
"and you may at your option offer warranty protection in exchange for a",
"fee.",
"",
" 2. You may modify your copy or copies of the Library or any portion",
"of it, thus forming a work based on the Library, and copy and",
"distribute such modifications or work under the terms of Section 1",
"above, provided that you also meet all of these conditions:",
"",
" a) The modified work must itself be a software library.",
"",
" b) You must cause the files modified to carry prominent notices",
" stating that you changed the files and the date of any change.",
"",
" c) You must cause the whole of the work to be licensed at no",
" charge to all third parties under the terms of this License.",
"",
" d) If a facility in the modified Library refers to a function or a",
" table of data to be supplied by an application program that uses",
" the facility, other than as an argument passed when the facility",
" is invoked, then you must make a good faith effort to ensure that,",
" in the event an application does not supply such function or",
" table, the facility still operates, and performs whatever part of",
" its purpose remains meaningful.",
"",
" (For example, a function in a library to compute square roots has",
" a purpose that is entirely well-defined independent of the",
" application. Therefore, Subsection 2d requires that any",
" application-supplied function or table used by this function must",
" be optional: if the application does not supply it, the square",
" root function must still compute square roots.)",
"",
"These requirements apply to the modified work as a whole. If",
"identifiable sections of that work are not derived from the Library,",
"and can be reasonably considered independent and separate works in",
"themselves, then this License, and its terms, do not apply to those",
"sections when you distribute them as separate works. But when you",
"distribute the same sections as part of a whole which is a work based",
"on the Library, the distribution of the whole must be on the terms of",
"this License, whose permissions for other licensees extend to the",
"entire whole, and thus to each and every part regardless of who wrote",
"it.",
"",
"Thus, it is not the intent of this section to claim rights or contest",
"your rights to work written entirely by you; rather, the intent is to",
"exercise the right to control the distribution of derivative or",
"collective works based on the Library.",
"",
"In addition, mere aggregation of another work not based on the Library",
"with the Library (or with a work based on the Library) on a volume of",
"a storage or distribution medium does not bring the other work under",
"the scope of this License.",
"",
" 3. You may opt to apply the terms of the ordinary GNU General Public",
"License instead of this License to a given copy of the Library. To do",
"this, you must alter all the notices that refer to this License, so",
"that they refer to the ordinary GNU General Public License, version 2,",
"instead of to this License. (If a newer version than version 2 of the",
"ordinary GNU General Public License has appeared, then you can specify",
"that version instead if you wish.) Do not make any other change in",
"these notices.",
"",
" Once this change is made in a given copy, it is irreversible for",
"that copy, so the ordinary GNU General Public License applies to all",
"subsequent copies and derivative works made from that copy.",
"",
" This option is useful when you wish to copy part of the code of",
"the Library into a program that is not a library.",
"",
" 4. You may copy and distribute the Library (or a portion or",
"derivative of it, under Section 2) in object code or executable form",
"under the terms of Sections 1 and 2 above provided that you accompany",
"it with the complete corresponding machine-readable source code, which",
"must be distributed under the terms of Sections 1 and 2 above on a",
"medium customarily used for software interchange.",
"",
" If distribution of object code is made by offering access to copy",
"from a designated place, then offering equivalent access to copy the",
"source code from the same place satisfies the requirement to",
"distribute the source code, even though third parties are not",
"compelled to copy the source along with the object code.",
"",
" 5. A program that contains no derivative of any portion of the",
"Library, but is designed to work with the Library by being compiled or",
"linked with it, is called a \"work that uses the Library\". Such a",
"work, in isolation, is not a derivative work of the Library, and",
"therefore falls outside the scope of this License.",
"",
" However, linking a \"work that uses the Library\" with the Library",
"creates an executable that is a derivative of the Library (because it",
"contains portions of the Library), rather than a \"work that uses the",
"library\". The executable is therefore covered by this License.",
"Section 6 states terms for distribution of such executables.",
"",
" When a \"work that uses the Library\" uses material from a header file",
"that is part of the Library, the object code for the work may be a",
"derivative work of the Library even though the source code is not.",
"Whether this is true is especially significant if the work can be",
"linked without the Library, or if the work is itself a library. The",
"threshold for this to be true is not precisely defined by law.",
"",
" If such an object file uses only numerical parameters, data",
"structure layouts and accessors, and small macros and small inline",
"functions (ten lines or less in length), then the use of the object",
"file is unrestricted, regardless of whether it is legally a derivative",
"work. (Executables containing this object code plus portions of the",
"Library will still fall under Section 6.)",
"",
" Otherwise, if the work is a derivative of the Library, you may",
"distribute the object code for the work under the terms of Section 6.",
"Any executables containing that work also fall under Section 6,",
"whether or not they are linked directly with the Library itself.",
"",
" 6. As an exception to the Sections above, you may also combine or",
"link a \"work that uses the Library\" with the Library to produce a",
"work containing portions of the Library, and distribute that work",
"under terms of your choice, provided that the terms permit",
"modification of the work for the customer's own use and reverse",
"engineering for debugging such modifications.",
"",
" You must give prominent notice with each copy of the work that the",
"Library is used in it and that the Library and its use are covered by",
"this License. You must supply a copy of this License. If the work",
"during execution displays copyright notices, you must include the",
"copyright notice for the Library among them, as well as a reference",
"directing the user to the copy of this License. Also, you must do one",
"of these things:",
"",
" a) Accompany the work with the complete corresponding",
" machine-readable source code for the Library including whatever",
" changes were used in the work (which must be distributed under",
" Sections 1 and 2 above); and, if the work is an executable linked",
" with the Library, with the complete machine-readable \"work that",
" uses the Library\", as object code and/or source code, so that the",
" user can modify the Library and then relink to produce a modified",
" executable containing the modified Library. (It is understood",
" that the user who changes the contents of definitions files in the",
" Library will not necessarily be able to recompile the application",
" to use the modified definitions.)",
"",
" b) Use a suitable shared library mechanism for linking with the",
" Library. A suitable mechanism is one that (1) uses at run time a",
" copy of the library already present on the user's computer system,",
" rather than copying library functions into the executable, and (2)",
" will operate properly with a modified version of the library, if",
" the user installs one, as long as the modified version is",
" interface-compatible with the version that the work was made with.",
"",
" c) Accompany the work with a written offer, valid for at",
" least three years, to give the same user the materials",
" specified in Subsection 6a, above, for a charge no more",
" than the cost of performing this distribution.",
"",
" d) If distribution of the work is made by offering access to copy",
" from a designated place, offer equivalent access to copy the above",
" specified materials from the same place.",
"",
" e) Verify that the user has already received a copy of these",
" materials or that you have already sent this user a copy.",
"",
" For an executable, the required form of the \"work that uses the",
"Library\" must include any data and utility programs needed for",
"reproducing the executable from it. However, as a special exception,",
"the materials to be distributed need not include anything that is",
"normally distributed (in either source or binary form) with the major",
"components (compiler, kernel, and so on) of the operating system on",
"which the executable runs, unless that component itself accompanies",
"the executable.",
"",
" It may happen that this requirement contradicts the license",
"restrictions of other proprietary libraries that do not normally",
"accompany the operating system. Such a contradiction means you cannot",
"use both them and the Library together in an executable that you",
"distribute.",
"",
" 7. You may place library facilities that are a work based on the",
"Library side-by-side in a single library together with other library",
"facilities not covered by this License, and distribute such a combined",
"library, provided that the separate distribution of the work based on",
"the Library and of the other library facilities is otherwise",
"permitted, and provided that you do these two things:",
"",
" a) Accompany the combined library with a copy of the same work",
" based on the Library, uncombined with any other library",
" facilities. This must be distributed under the terms of the",
" Sections above.",
"",
" b) Give prominent notice with the combined library of the fact",
" that part of it is a work based on the Library, and explaining",
" where to find the accompanying uncombined form of the same work.",
"",
" 8. You may not copy, modify, sublicense, link with, or distribute",
"the Library except as expressly provided under this License. Any",
"attempt otherwise to copy, modify, sublicense, link with, or",
"distribute the Library is void, and will automatically terminate your",
"rights under this License. However, parties who have received copies,",
"or rights, from you under this License will not have their licenses",
"terminated so long as such parties remain in full compliance.",
"",
" 9. You are not required to accept this License, since you have not",
"signed it. However, nothing else grants you permission to modify or",
"distribute the Library or its derivative works. These actions are",
"prohibited by law if you do not accept this License. Therefore, by",
"modifying or distributing the Library (or any work based on the",
"Library), you indicate your acceptance of this License to do so, and",
"all its terms and conditions for copying, distributing or modifying",
"the Library or works based on it.",
"",
" 10. Each time you redistribute the Library (or any work based on the",
"Library), the recipient automatically receives a license from the",
"original licensor to copy, distribute, link with or modify the Library",
"subject to these terms and conditions. You may not impose any further",
"restrictions on the recipients' exercise of the rights granted herein.",
"You are not responsible for enforcing compliance by third parties with",
"this License.",
"",
" 11. If, as a consequence of a court judgment or allegation of patent",
"infringement or for any other reason (not limited to patent issues),",
"conditions are imposed on you (whether by court order, agreement or",
"otherwise) that contradict the conditions of this License, they do not",
"excuse you from the conditions of this License. If you cannot",
"distribute so as to satisfy simultaneously your obligations under this",
"License and any other pertinent obligations, then as a consequence you",
"may not distribute the Library at all. For example, if a patent",
"license would not permit royalty-free redistribution of the Library by",
"all those who receive copies directly or indirectly through you, then",
"the only way you could satisfy both it and this License would be to",
"refrain entirely from distribution of the Library.",
"",
"If any portion of this section is held invalid or unenforceable under any",
"particular circumstance, the balance of the section is intended to apply,",
"and the section as a whole is intended to apply in other circumstances.",
"",
"It is not the purpose of this section to induce you to infringe any",
"patents or other property right claims or to contest validity of any",
"such claims; this section has the sole purpose of protecting the",
"integrity of the free software distribution system which is",
"implemented by public license practices. Many people have made",
"generous contributions to the wide range of software distributed",
"through that system in reliance on consistent application of that",
"system; it is up to the author/donor to decide if he or she is willing",
"to distribute software through any other system and a licensee cannot",
"impose that choice.",
"",
"This section is intended to make thoroughly clear what is believed to",
"be a consequence of the rest of this License.",
"",
" 12. If the distribution and/or use of the Library is restricted in",
"certain countries either by patents or by copyrighted interfaces, the",
"original copyright holder who places the Library under this License may add",
"an explicit geographical distribution limitation excluding those countries,",
"so that distribution is permitted only in or among countries not thus",
"excluded. In such case, this License incorporates the limitation as if",
"written in the body of this License.",
"",
" 13. The Free Software Foundation may publish revised and/or new",
"versions of the Lesser General Public License from time to time.",
"Such new versions will be similar in spirit to the present version,",
"but may differ in detail to address new problems or concerns.",
"",
"Each version is given a distinguishing version number. If the Library",
"specifies a version number of this License which applies to it and",
"\"any later version\", you have the option of following the terms and",
"conditions either of that version or of any later version published by",
"the Free Software Foundation. If the Library does not specify a",
"license version number, you may choose any version ever published by",
"the Free Software Foundation.",
"",
" 14. If you wish to incorporate parts of the Library into other free",
"programs whose distribution conditions are incompatible with these,",
"write to the author to ask for permission. For software which is",
"copyrighted by the Free Software Foundation, write to the Free",
"Software Foundation; we sometimes make exceptions for this. Our",
"decision will be guided by the two goals of preserving the free status",
"of all derivatives of our free software and of promoting the sharing",
"and reuse of software generally.",
"",
"\t\t\t NO WARRANTY",
"",
" 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO",
"WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.",
"EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR",
"OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY",
"KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE",
"IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR",
"PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE",
"LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME",
"THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.",
"",
" 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN",
"WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY",
"AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU",
"FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR",
"CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE",
"LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING",
"RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A",
"FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF",
"SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH",
"DAMAGES.",
"",
"\t\t END OF TERMS AND CONDITIONS",
"",
" How to Apply These Terms to Your New Libraries",
"",
" If you develop a new library, and you want it to be of the greatest",
"possible use to the public, we recommend making it free software that",
"everyone can redistribute and change. You can do so by permitting",
"redistribution under these terms (or, alternatively, under the terms of the",
"ordinary General Public License).",
"",
" To apply these terms, attach the following notices to the library. It is",
"safest to attach them to the start of each source file to most effectively",
"convey the exclusion of warranty; and each file should have at least the",
"\"copyright\" line and a pointer to where the full notice is found.",
"",
" <one line to give the library's name and a brief idea of what it does.>",
" Copyright (C) <year> <name of author>",
"",
" This library is free software; you can redistribute it and/or",
" modify it under the terms of the GNU Lesser General Public",
" License as published by the Free Software Foundation; either",
" version 2.1 of the License, or (at your option) any later version.",
"",
" This library is distributed in the hope that it will be useful,",
" but WITHOUT ANY WARRANTY; without even the implied warranty of",
" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU",
" Lesser General Public License for more details.",
"",
" You should have received a copy of the GNU Lesser General Public",
" License along with this library; if not, write to the Free Software",
" Foundation, Inc.,",
"51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA",
"",
"Also add information on how to contact you by electronic and paper mail.",
"",
"You should also get your employer (if you work as a programmer) or your",
"school, if any, to sign a \"copyright disclaimer\" for the library, if",
"necessary. Here is a sample; alter the names:",
"",
" Yoyodyne, Inc., hereby disclaims all copyright interest in the",
" library `Frob' (a library for tweaking knobs) written by James Random Hacker.",
"",
" <signature of Ty Coon>,",
"1 April 1990",
" Ty Coon, President of Vice",
"",
"That's all there is to it!"
],
"isProd": true
},
{
"isLicense": true,
"name": "@types/node",
"repositoryURL": "https://www.github.com/DefinitelyTyped/DefinitelyTyped",
"license": "MIT",
"licenseDetail": [
"This project is licensed under the MIT license.",
"Copyrights are respective of each contributor listed at the beginning of each definition file.",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
],
"isProd": true
},
{
"isLicense": true,
"name": "jsonfile",
"repositoryURL": "https+ssh://git@github.com/jprichardson/node-jsonfile",
"license": "MIT",
"licenseDetail": [
"(The MIT License)",
"",
"Copyright (c) 2012-2015, JP Richardson <jprichardson@gmail.com>",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files",
"(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,",
" merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is",
" furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE",
"WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS",
"OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,",
" ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
],
"isProd": true
},
{
"isLicense": true,
"name": "string_decoder",
"repositoryURL": "https://github.com/rvagg/string_decoder",
"license": "MIT",
"licenseDetail": [
"The MIT License (MIT)",
"",
"Node.js is licensed for use as follows:",
"",
"\"\"\"",
"Copyright Node.js contributors. All rights reserved.",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to",
"deal in the Software without restriction, including without limitation the",
"rights to use, copy, modify, merge, publish, distribute, sublicense, and/or",
"sell copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in",
"all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING",
"FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS",
"IN THE SOFTWARE.",
"\"\"\"",
"",
"This license applies to parts of Node.js originating from the",
"https://github.com/joyent/node repository:",
"",
"\"\"\"",
"Copyright Joyent, Inc. and other Node contributors. All rights reserved.",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to",
"deal in the Software without restriction, including without limitation the",
"rights to use, copy, modify, merge, publish, distribute, sublicense, and/or",
"sell copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in",
"all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING",
"FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS",
"IN THE SOFTWARE.",
"\"\"\""
],
"isProd": true
}
]

README.md

@@ -1,7 +1,7 @@
# Visual Studio Code - Open Source
[![Build Status](https://travis-ci.org/Microsoft/vscode.svg?branch=master)](https://travis-ci.org/Microsoft/vscode)
[![Build status](https://ci.appveyor.com/api/projects/status/vuhlhg80tj3e2a0l?svg=true)](https://ci.appveyor.com/project/VSCode/vscode)
[![Build Status](https://ci.appveyor.com/api/projects/status/vuhlhg80tj3e2a0l/branch/master?svg=true)](https://ci.appveyor.com/project/VSCode/vscode)
[![Coverage Status](https://img.shields.io/coveralls/Microsoft/vscode/master.svg)](https://coveralls.io/github/Microsoft/vscode?branch=master)
[![Gitter](https://img.shields.io/badge/chat-on%20gitter-blue.svg)](https://gitter.im/Microsoft/vscode)
@@ -9,7 +9,7 @@
a code editor with what developers need for their core edit-build-debug cycle. Code
provides comprehensive editing and debugging support, an extensibility model, and lightweight integration with existing tools.
VS Code is updated monthly with new features and bug fixes. You can download it for Windows, Mac and Linux on [VS Code's website](https://code.visualstudio.com/Download). To get the latest releases everyday, you can install the [Insiders version of VS Code](https://code.visualstudio.com/insiders). This builds from the master branch and is updated at least daily.
VS Code is updated monthly with new features and bug fixes. You can download it for Windows, macOS, and Linux on [VS Code's website](https://code.visualstudio.com/Download). To get the latest releases everyday, you can install the [Insiders version of VS Code](https://code.visualstudio.com/insiders). This builds from the master branch and is updated at least daily.
<p align="center">
<img alt="VS Code in action" src="https://cloud.githubusercontent.com/assets/11839736/16642200/6624dde0-43bd-11e6-8595-c81885ba0dc2.png">
@@ -31,11 +31,11 @@ please see the document [How to Contribute](https://github.com/Microsoft/vscode/
* [Coding Guidelines](https://github.com/Microsoft/vscode/wiki/Coding-Guidelines)
* [Submitting pull requests](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#pull-requests)
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
Please see also our [Code of Conduct](CODE_OF_CONDUCT.md).
## Feedback
* Ask a question on [Stack Overflow](http://stackoverflow.com/questions/tagged/vscode).
* Ask a question on [Stack Overflow](https://stackoverflow.com/questions/tagged/vscode).
* Request a new feature on [GitHub](CONTRIBUTING.md).
* Vote for [popular feature requests](https://github.com/Microsoft/vscode/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc).
* File a bug in [GitHub Issues](https://github.com/Microsoft/vscode/issues).

ThirdPartyNotices.txt

@@ -5,62 +5,62 @@ Do Not Translate or Localize
This project incorporates components from the projects listed below. The original copyright notices and the licenses under which Microsoft received such components are set forth below. Microsoft reserves all rights not expressly granted herein, whether by implication, estoppel or otherwise.
1. atom/language-c version 0.51.3 (https://github.com/atom/language-c)
2. atom/language-sass version 0.52.0 (https://github.com/atom/language-sass)
3. atom/language-xml (https://github.com/atom/language-xml)
4. Benvie/JavaScriptNext.tmLanguage (https://github.com/Benvie/JavaScriptNext.tmLanguage)
5. chjj-marked version 0.3.2 (https://github.com/npmcomponent/chjj-marked)
6. chriskempson/tomorrow-theme (https://github.com/chriskempson/tomorrow-theme)
7. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
8. daaain/Handlebars (https://github.com/daaain/Handlebars)
9. davidrios/jade-tmbundle (https://github.com/davidrios/jade-tmbundle)
10. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
11. demyte/language-cshtml (https://github.com/demyte/language-cshtml)
12. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
13. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
14. Ionic documentation version 1.2.4 (https://github.com/driftyco/ionic-site)
15. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
16. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
17. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
18. language-docker (https://github.com/docker/docker)
19. language-go version 0.39.0 (https://github.com/atom/language-go)
20. language-less (https://github.com/atom/language-less)
21. language-php (https://github.com/atom/language-php)
22. language-rust version 0.4.4 (https://github.com/zargony/atom-language-rust)
23. MagicStack/MagicPython (https://github.com/MagicStack/MagicPython)
24. Microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/Microsoft/TypeScript-TmLanguage)
25. mmcgrana/textmate-clojure (https://github.com/mmcgrana/textmate-clojure)
26. octicons-code version 3.1.0 (https://octicons.github.com)
27. octicons-font version 3.1.0 (https://octicons.github.com)
28. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
29. string_scorer version 0.1.20 (https://github.com/joshaven/string_score)
30. sublimehq/Packages (https://github.com/sublimehq/Packages)
31. SublimeText/PowerShell (https://github.com/SublimeText/PowerShell)
32. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
33. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
34. textmate/coffee-script.tmbundle (https://github.com/textmate/coffee-script.tmbundle)
35. textmate/css.tmbundle (https://github.com/textmate/css.tmbundle)
36. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
37. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
38. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
39. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
40. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
41. textmate/java.tmbundle (https://github.com/textmate/java.tmbundle)
42. textmate/javadoc.tmbundle (https://github.com/textmate/javadoc.tmbundle)
43. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
44. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
45. textmate/make.tmbundle (https://github.com/textmate/make.tmbundle)
46. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
47. textmate/objective-c.tmbundle (https://github.com/textmate/objective-c.tmbundle)
48. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
49. textmate/r.tmbundle (https://github.com/textmate/r.tmbundle)
50. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
51. textmate/shellscript.tmbundle (https://github.com/textmate/shellscript.tmbundle)
52. textmate/sql.tmbundle (https://github.com/textmate/sql.tmbundle)
53. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
54. typescript-legacy version 1.5 (https://github.com/Microsoft/TypeScript)
55. TypeScript-TmLanguage version 0.1.8 (https://github.com/Microsoft/TypeScript-TmLanguage)
56. unity-shader-files version 0.1.0 (https://github.com/nadege/unity-shader-files)
1. atom/language-c (https://github.com/atom/language-c)
2. atom/language-clojure (https://github.com/atom/language-clojure)
3. atom/language-coffee-script (https://github.com/atom/language-coffee-script)
4. atom/language-css (https://github.com/atom/language-css)
5. atom/language-java (https://github.com/atom/language-java)
6. atom/language-objective-c (https://github.com/atom/language-objective-c)
7. atom/language-sass version 0.52.0 (https://github.com/atom/language-sass)
8. atom/language-xml (https://github.com/atom/language-xml)
9. Benvie/JavaScriptNext.tmLanguage (https://github.com/Microsoft/vscode-JSON.tmLanguage)
10. chjj-marked version 0.3.6 (https://github.com/npmcomponent/chjj-marked)
11. chriskempson/tomorrow-theme (https://github.com/chriskempson/tomorrow-theme)
12. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
13. daaain/Handlebars (https://github.com/daaain/Handlebars)
14. davidrios/jade-tmbundle (https://github.com/davidrios/jade-tmbundle)
15. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
16. demyte/language-cshtml (https://github.com/demyte/language-cshtml)
17. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
18. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
19. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
20. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
21. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
22. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
23. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
24. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
25. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
26. language-docker (https://github.com/moby/moby)
27. language-go version 0.39.0 (https://github.com/atom/language-go)
28. language-less (https://github.com/atom/language-less)
29. language-php (https://github.com/atom/language-php)
30. language-rust version 0.4.9 (https://github.com/zargony/atom-language-rust)
31. MagicStack/MagicPython (https://github.com/MagicStack/MagicPython)
32. Microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/Microsoft/TypeScript-TmLanguage)
33. octicons-code version 3.1.0 (https://octicons.github.com)
34. octicons-font version 3.1.0 (https://octicons.github.com)
35. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
36. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
37. string_scorer version 0.1.20 (https://github.com/joshaven/string_score)
38. sublimehq/Packages (https://github.com/sublimehq/Packages)
39. SublimeText/PowerShell (https://github.com/SublimeText/PowerShell)
40. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
41. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
42. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
43. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
44. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
45. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
46. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
47. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
48. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
49. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
50. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
51. textmate/r.tmbundle (https://github.com/textmate/r.tmbundle)
52. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
53. textmate/shellscript.tmbundle (https://github.com/textmate/shellscript.tmbundle)
54. textmate/sql.tmbundle (https://github.com/textmate/sql.tmbundle)
55. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
56. TypeScript-TmLanguage version 0.1.8 (https://github.com/Microsoft/TypeScript-TmLanguage)
57. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
@ -101,6 +101,224 @@ suitability for any purpose.
=========================================
END OF atom/language-c NOTICES AND INFORMATION
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/mmcgrana/textmate-clojure and distributed under the
following license, located in `LICENSE.md`:
The MIT License (MIT)
Copyright (c) 2010- Mark McGranaghan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF atom/language-clojure NOTICES AND INFORMATION
%% atom/language-coffee-script NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/jashkenas/coffee-script-tmbundle and distributed under the
following license, located in `LICENSE`:
Copyright (c) 2009-2014 Jeremy Ashkenas
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF atom/language-coffee-script NOTICES AND INFORMATION
%% atom/language-css NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/textmate/css.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
=========================================
END OF atom/language-css NOTICES AND INFORMATION
%% atom/language-java NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/textmate/java.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
=========================================
END OF atom/language-java NOTICES AND INFORMATION
%% atom/language-objective-c NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/textmate/objective-c.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
=========================================
END OF atom/language-objective-c NOTICES AND INFORMATION
%% atom/language-sass NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@ -193,27 +411,25 @@ END OF atom/language-xml NOTICES AND INFORMATION
%% Benvie/JavaScriptNext.tmLanguage NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
vscode-JSON.tmLanguage
Copyright (c) 2015 simonzack, zertosh, benvie
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
All rights reserved.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
MIT License
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the ""Software""), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF Benvie/JavaScriptNext.tmLanguage NOTICES AND INFORMATION
@ -365,6 +581,76 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF demyte/language-cshtml NOTICES AND INFORMATION
%% dotnet/csharp-tmLanguage NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) 2016 .NET Foundation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF dotnet/csharp-tmLanguage NOTICES AND INFORMATION
%% expand-abbreviation NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) 2017 Emmet.io
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF expand-abbreviation NOTICES AND INFORMATION
%% fadeevab/make.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-make.tmbundle project authors
If not otherwise specified (see below), files in this repository fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF fadeevab/make.tmbundle NOTICES AND INFORMATION
%% freebroccolo/atom-language-swift NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@ -717,7 +1003,7 @@ Apache License
END OF TERMS AND CONDITIONS
Copyright 2013-2016 Docker, Inc.
Copyright 2013-2017 Docker, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@ -928,7 +1214,7 @@ END OF language-rust NOTICES AND INFORMATION
=========================================
The MIT License
Copyright (c) 2015 MagicStack Inc. http://magic.io
Copyright (c) 2015-present MagicStack Inc. http://magic.io
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -1133,32 +1419,6 @@ Copyright (c) Microsoft Corporation. All rights reserved.
=========================================
END OF Microsoft/TypeScript-TmLanguage NOTICES AND INFORMATION
%% mmcgrana/textmate-clojure NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2010- Mark McGranaghan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF mmcgrana/textmate-clojure NOTICES AND INFORMATION
%% octicons-code NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@ -1301,6 +1561,32 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF seti-ui NOTICES AND INFORMATION
%% shaders-tmLanguage NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) 2017 Tim Jones
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF shaders-tmLanguage NOTICES AND INFORMATION
%% string_scorer NOTICES AND INFORMATION BEGIN HERE
=========================================
This software is released under the MIT license:
@ -1394,51 +1680,6 @@ to the base-name name of the original file, and an extension of txt, html, or si
=========================================
END OF textmate/c.tmbundle NOTICES AND INFORMATION
%% textmate/coffee-script.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2009-2014 Jeremy Ashkenas
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF textmate/coffee-script.tmbundle NOTICES AND INFORMATION
%% textmate/css.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-css.tmbundle project authors
If not otherwise specified (see below), files in this folder fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF textmate/css.tmbundle NOTICES AND INFORMATION
%% textmate/diff.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-diff.tmbundle project authors
@ -1538,42 +1779,6 @@ to the base-name name of the original file, and an extension of txt, html, or si
=========================================
END OF textmate/ini.tmbundle NOTICES AND INFORMATION
%% textmate/java.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-java.tmbundle project authors
If not otherwise specified (see below), files in this repository fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF textmate/java.tmbundle NOTICES AND INFORMATION
%% textmate/javadoc.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-javadoc.tmbundle project authors
If not otherwise specified (see below), files in this repository fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF textmate/javadoc.tmbundle NOTICES AND INFORMATION
%% textmate/javascript.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-javascript.tmbundle project authors
@ -1610,24 +1815,6 @@ to the base-name name of the original file, and an extension of txt, html, or si
=========================================
END OF textmate/lua.tmbundle NOTICES AND INFORMATION
%% textmate/make.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-make.tmbundle project authors
If not otherwise specified (see below), files in this repository fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF textmate/make.tmbundle NOTICES AND INFORMATION
%% textmate/markdown.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) markdown.tmbundle authors
@ -1646,24 +1833,6 @@ to the base-name name of the original file, and an extension of txt, html, or si
=========================================
END OF textmate/markdown.tmbundle NOTICES AND INFORMATION
%% textmate/objective-c.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-objective-c.tmbundle project authors
If not otherwise specified (see below), files in this repository fall under the following license:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
An exception is made for files in readable text which contain their own license information,
or files where an accompanying file exists (in the same directory) with a "-license" suffix added
to the base-name name of the original file, and an extension of txt, html, or similar. For example
"tidy" is accompanied by "tidy-license.txt".
=========================================
END OF textmate/objective-c.tmbundle NOTICES AND INFORMATION
%% textmate/perl.tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) textmate-perl.tmbundle project authors
@ -1772,68 +1941,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SO
=========================================
END OF textmate/yaml.tmbundle NOTICES AND INFORMATION
%% typescript-legacy NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
=========================================
END OF typescript-legacy NOTICES AND INFORMATION
%% TypeScript-TmLanguage NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation. All rights reserved.
@ -2017,34 +2124,6 @@ Copyright (c) Microsoft Corporation. All rights reserved.
=========================================
END OF TypeScript-TmLanguage NOTICES AND INFORMATION
%% unity-shader-files NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 Teddy Bradford (for original ./Grammars/shaderlab.cson file)
Copyright (c) 2015 Nadège Michel
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF unity-shader-files NOTICES AND INFORMATION
%% vscode-swift NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
View file
@ -3,14 +3,14 @@ environment:
VSCODE_BUILD_VERBOSE: true
install:
- ps: Install-Product node 6.6.0 x64
- npm install -g npm --silent
- ps: Install-Product node 7.4.0 x64
- npm install -g npm@4 --silent
- npm install -g gulp mocha --silent
build_script:
- .\scripts\npm.bat install --force
- .\scripts\npm.bat install
- gulp electron
- gulp compile
- npm run compile
test_script:
- node --version
View file
@ -21,8 +21,8 @@ var editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
exclude: [],
prepend: [ 'vs/css.js', 'vs/nls.js' ],
exclude: [ 'vs/css', 'vs/nls' ],
prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
},
{
name: 'vs/base/common/worker/simpleWorker',
@ -38,7 +38,6 @@ var editorResources = [
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/editor/contrib/defineKeybinding/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
];
View file
@ -9,6 +9,7 @@ const gulp = require('gulp');
const filter = require('gulp-filter');
const es = require('event-stream');
const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const tslint = require('tslint');
@ -40,17 +41,23 @@ const eolFilter = [
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**'
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
];
const indentationFilter = [
'**',
'!ThirdPartyNotices.txt',
'!**/*.md',
'!**/*.ps1',
'!**/*.template',
'!**/*.yml',
'!**/lib/**',
'!**/*.d.ts',
'!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!test/assert.js',
'!**/package.json',
@ -84,15 +91,29 @@ const copyrightFilter = [
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!build/**/*.init',
'!resources/win32/bin/code.js',
'!extensions/markdown/media/tomorrow.css',
'!extensions/html/server/src/modes/typescript/*'
];
const eslintFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/winjs.base.raw.js',
'!src/**/raw.marked.js',
'!**/test/**'
];
const tslintFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/*.d.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
@ -119,6 +140,14 @@ function reportFailures(failures) {
});
}
gulp.task('eslint', () => {
return gulp.src(all, { base: '.' })
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.failAfterError());
});
gulp.task('tslint', () => {
const options = { summarizeFailureOutput: true };
@ -171,7 +200,6 @@ const hygiene = exports.hygiene = (some, options) => {
});
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: true,
tsfmt: true,
@ -204,17 +232,27 @@ const hygiene = exports.hygiene = (some, options) => {
this.emit('data', file);
});
return gulp.src(some || all, { base: '.' })
const result = gulp.src(some || all, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights)
.pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
.pipe(formatting)
.pipe(tsl)
.pipe(tsl);
const javascript = result
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.failAfterError());
return es.merge(typescript, javascript)
.pipe(es.through(null, function () {
if (errorCount > 0) {
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
@ -238,6 +276,14 @@ if (require.main === module) {
cp.exec('git config core.autocrlf', (err, out) => {
const skipEOL = out.trim() === 'true';
if (process.argv.length > 2) {
return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
View file
@ -1,28 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var gulp = require('gulp');
var tsb = require('gulp-tsb');
var util = require('./lib/util');
var watcher = require('./lib/watch');
var assign = require('object-assign');
var compilation = tsb.create(assign({ verbose: true }, require('./tsconfig.json').compilerOptions));
gulp.task('compile', function() {
return gulp.src('**/*.ts', { base: '.' })
.pipe(compilation())
.pipe(gulp.dest(''));
});
gulp.task('watch', function() {
var src = gulp.src('**/*.ts', { base: '.' });
return watcher('**/*.ts', { base: '.' })
.pipe(util.incremental(compilation, src))
.pipe(gulp.dest(''));
});
gulp.task('default', ['compile']);
View file
@ -31,8 +31,8 @@ gulp.task('mixin', function () {
return;
}
const url = `https://github.com/${ repo }/archive/${ pkg.distro }.zip`;
const opts = { base: '' };
const url = `https://github.com/${repo}/archive/${pkg.distro}.zip`;
const opts = { base: url };
const username = process.env['VSCODE_MIXIN_USERNAME'];
const password = process.env['VSCODE_MIXIN_PASSWORD'];
@ -42,37 +42,22 @@ gulp.task('mixin', function () {
console.log('Mixing in sources from \'' + url + '\':');
let all = remote(url, opts)
let all = remote('', opts)
.pipe(zip.src())
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(util.rebase(1));
if (quality) {
const build = all.pipe(filter('build/**'));
const productJsonFilter = filter('product.json', { restore: true });
const vsdaFilter = (function() {
const filter = [];
if (process.platform !== 'win32') { filter.push('!**/vsda_win32.node'); }
if (process.platform !== 'darwin') { filter.push('!**/vsda_darwin.node'); }
if (process.platform !== 'linux' || process.arch !== 'x64') { filter.push('!**/vsda_linux64.node'); }
if (process.platform !== 'linux' || process.arch === 'x64') { filter.push('!**/vsda_linux32.node'); }
return filter;
})();
const mixin = all
.pipe(filter(['quality/' + quality + '/**'].concat(vsdaFilter)))
.pipe(filter(['quality/' + quality + '/**']))
.pipe(util.rebase(2))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(function (patch) {
const original = require('../product.json');
return assign(original, patch);
}))
.pipe(json(o => assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore);
all = es.merge(build, mixin);
all = es.merge(mixin);
}
return all
View file
@ -29,9 +29,12 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const shrinkwrap = require('../npm-shrinkwrap.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const glob = require('glob');
const productDependencies = Object.keys(product.dependencies || {});
const dependencies = Object.keys(shrinkwrap.dependencies)
.concat(Array.isArray(product.extraNodeModules) ? product.extraNodeModules : []); // additional dependencies from our product configuration
.concat(productDependencies); // additional dependencies from our product configuration
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
const nodeModules = ['electron', 'original-fs']
.concat(dependencies)
@ -40,12 +43,17 @@ const nodeModules = ['electron', 'original-fs']
// Build
const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.9.12' },
{ name: 'ms-vscode.node-debug2', version: '1.9.12' }
{ name: 'ms-vscode.node-debug', version: '1.16.7' },
{ name: 'ms-vscode.node-debug2', version: '1.16.7' }
];
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests'
];
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/electron-browser/workbench.main'),
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base,
buildfile.workbench,
buildfile.code
@ -57,24 +65,22 @@ const vscodeResources = [
'out-build/bootstrap.js',
'out-build/bootstrap-amd.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur}',
'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/node/startupTimers.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/git/**/*.html',
'out-build/vs/workbench/parts/git/**/*.sh',
'out-build/vs/workbench/parts/html/browser/webview.html',
'out-build/vs/workbench/parts/html/browser/webview-pre.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/walkThrough/**/*.md',
'out-build/vs/workbench/parts/welcomePage/**/*.html',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/sharedProcess.js',
'!**/test/**'
];
@ -84,6 +90,11 @@ const BUNDLED_FILE_HEADER = [
' *--------------------------------------------------------*/'
].join('\n');
var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
if (process.env.VSCODE_QUALITY !== 'stable') {
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
}
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
entryPoints: vscodeEntryPoints,
@ -91,7 +102,8 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode'
out: 'out-vscode',
languages: languages
}));
@ -117,11 +129,13 @@ const config = {
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [{
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: ["ascx", "asp", "aspx", "bash", "bash_login", "bash_logout", "bash_profile", "bashrc", "bat", "bowerrc", "c", "cc", "clj", "cljs", "cljx", "clojure", "cmd", "coffee", "config", "cpp", "cs", "cshtml", "csproj", "css", "csx", "ctp", "cxx", "dockerfile", "dot", "dtd", "editorconfig", "edn", "eyaml", "eyml", "fs", "fsi", "fsscript", "fsx", "gemspec", "gitattributes", "gitconfig", "gitignore", "go", "h", "handlebars", "hbs", "hh", "hpp", "htm", "html", "hxx", "ini", "jade", "jav", "java", "js", "jscsrc", "jshintrc", "jshtm", "json", "jsp", "less", "lua", "m", "makefile", "markdown", "md", "mdoc", "mdown", "mdtext", "mdtxt", "mdwn", "mkd", "mkdn", "ml", "mli", "php", "phtml", "pl", "pl6", "pm", "pm6", "pod", "pp", "profile", "properties", "ps1", "psd1", "psgi", "psm1", "py", "r", "rb", "rhistory", "rprofile", "rs", "rt", "scss", "sh", "shtml", "sql", "svg", "svgz", "t", "ts", "txt", "vb", "wxi", "wxl", "wxs", "xaml", "xml", "yaml", "yml", "zlogin", "zlogout", "zprofile", "zsh", "zshenv", "zshrc"],
extensions: ["ascx", "asp", "aspx", "bash", "bash_login", "bash_logout", "bash_profile", "bashrc", "bat", "bowerrc", "c", "cc", "clj", "cljs", "cljx", "clojure", "cmd", "code-workspace", "coffee", "config", "cpp", "cs", "cshtml", "csproj", "css", "csx", "ctp", "cxx", "dockerfile", "dot", "dtd", "editorconfig", "edn", "eyaml", "eyml", "fs", "fsi", "fsscript", "fsx", "gemspec", "gitattributes", "gitconfig", "gitignore", "go", "h", "handlebars", "hbs", "hh", "hpp", "htm", "html", "hxx", "ini", "jade", "jav", "java", "js", "jscsrc", "jshintrc", "jshtm", "json", "jsp", "less", "lua", "m", "makefile", "markdown", "md", "mdoc", "mdown", "mdtext", "mdtxt", "mdwn", "mkd", "mkdn", "ml", "mli", "php", "phtml", "pl", "pl6", "pm", "pm6", "pod", "pp", "profile", "properties", "ps1", "psd1", "psgi", "psm1", "py", "r", "rb", "rhistory", "rprofile", "rs", "rt", "scss", "sh", "shtml", "sql", "svg", "svgz", "t", "ts", "txt", "vb", "wxi", "wxl", "wxs", "xaml", "xcodeproj", "xcworkspace", "xml", "yaml", "yml", "zlogin", "zlogout", "zprofile", "zsh", "zshenv", "zshrc"],
iconFile: 'resources/darwin/code_file.icns'
}],
darwinBundleURLTypes: [{
@ -132,25 +146,32 @@ const config = {
darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['GITHUB_TOKEN'] || void 0
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
repo: product.electronRepository || void 0
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
gulp.task('electron', ['clean-electron'], () => {
const platform = process.platform;
const arch = process.env.VSCODE_ELECTRON_PLATFORM || (platform === 'win32' ? 'ia32' : process.arch);
const opts = _.extend({}, config, { platform, arch, ffmpegChromium: true, keepDefaultApp: true });
const name = product.nameShort;
return gulp.src('package.json')
.pipe(json({ name }))
.pipe(electron(opts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
});
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
/**
* Compute checksums for some files.
@ -191,55 +212,52 @@ function packageTask(platform, arch, opts) {
const destination = path.join(path.dirname(root), 'VSCode') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform;
arch = platform === 'win32' ? 'ia32' : arch;
return () => {
const out = opts.minified ? 'out-vscode-min' : 'out-vscode';
const checksums = computeChecksums(out, [
'vs/workbench/electron-browser/workbench.main.js',
'vs/workbench/electron-browser/workbench.main.css',
'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css',
'vs/workbench/electron-browser/bootstrap/index.html',
'vs/workbench/electron-browser/bootstrap/index.js'
'vs/workbench/electron-browser/bootstrap/index.js',
'vs/workbench/electron-browser/bootstrap/preload.js'
]);
const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
const extensionsList = [
'extensions/*/**',
'!extensions/*/src/**',
'!extensions/*/out/**/test/**',
'!extensions/*/test/**',
'!extensions/*/build/**',
'!extensions/**/node_modules/@types/**',
'!extensions/*/{client,server}/src/**',
'!extensions/*/{client,server}/test/**',
'!extensions/*/{client,server}/out/**/test/**',
'!extensions/*/{client,server}/out/**/typings/**',
'!extensions/**/.vscode/**',
'!extensions/**/tsconfig.json',
'!extensions/typescript/bin/**',
'!extensions/vscode-api-tests/**',
'!extensions/vscode-colorize-tests/**',
...builtInExtensions.map(e => `!extensions/${e.name}/**`)
];
const root = path.resolve(path.join(__dirname, '..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const nlsFilter = filter('**/*.nls.json', { restore: true });
const extensions = gulp.src(extensionsList, { base: '.' })
// TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
return ext.fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
// // TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
}));
const localExtensionDependencies = gulp.src('extensions/node_modules/**', { base: '.' });
const marketplaceExtensions = es.merge(...builtInExtensions.map(extension => {
return ext.src(extension.name, extension.version)
return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
}));
const sources = es.merge(src, extensions, marketplaceExtensions)
const sources = es.merge(src, localExtensions, localExtensionDependencies, marketplaceExtensions)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
let version = packageJson.version;
@ -259,6 +277,8 @@ function packageTask(platform, arch, opts) {
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
const watermark = gulp.src(['resources/letterpress.svg', 'resources/letterpress-dark.svg', 'resources/letterpress-hc.svg'], { base: '.' });
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
@ -266,18 +286,26 @@ function packageTask(platform, arch, opts) {
.map(function (d) { return ['node_modules/' + d + '/**', '!node_modules/' + d + '/**/{test,tests}/**']; }));
const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/**']));
.pipe(util.cleanNodeModule('v8-profiler', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
watermark,
api,
sources,
deps
@ -322,24 +350,77 @@ function packageTask(platform, arch, opts) {
const buildRoot = path.dirname(root);
gulp.task('clean-vscode-win32', util.rimraf(path.join(buildRoot, 'VSCode-win32')));
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'VSCode-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'VSCode-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'VSCode-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'VSCode-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'VSCode-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'VSCode-linux-arm')));
gulp.task('vscode-win32', ['optimize-vscode', 'clean-vscode-win32'], packageTask('win32'));
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-win32-min', ['minify-vscode', 'clean-vscode-win32'], packageTask('win32', null, { minified: true }));
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
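// Each vscode-<platform>[-<arch>][-min] task above stages a full distributable build into a sibling
// folder of the repository (e.g. ../VSCode-darwin, ../VSCode-linux-x64); the -min variants package
// the minified out-vscode-min output instead of out-vscode.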
// Transifex Localizations
const vscodeLanguages = [
'zh-hans',
'zh-hant',
'ja',
'ko',
'de',
'fr',
'es',
'ru',
'it',
'pt-br',
'hu',
'tr'
];
const setupDefaultLanguages = [
'zh-hans',
'zh-hant',
'ko'
];
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/**/*.nls.json';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});
gulp.task('vscode-translations-pull', function () {
return es.merge(
i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
).pipe(vfs.dest('../vscode-localization'));
});
gulp.task('vscode-translations-import', function () {
return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
});
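// Usage sketch (placeholder values; the real credentials are assumed to come from the CI environment):
//   TRANSIFEX_API_URL=<host> TRANSIFEX_API_NAME=<user> TRANSIFEX_API_TOKEN=<token> gulp vscode-translations-push
// vscode-translations-pull writes the downloaded XLF files to ../vscode-localization, and
// vscode-translations-import converts them back into JSON under ./i18n.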
// Sourcemaps
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {


@ -111,7 +111,7 @@ function buildDebPackage(arch) {
'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
], { cwd: '.build/linux/deb/' + debArch});
], { cwd: '.build/linux/deb/' + debArch });
}
function getRpmBuildPath(rpmArch) {
@ -175,8 +175,7 @@ function buildRpmPackage(arch) {
return shell.task([
'mkdir -p ' + destination,
'HOME="$(pwd)/' + destination + '" fakeroot rpmbuild -bb ' + rpmBuildPath + '/SPECS/' + product.applicationName + '.spec --target=' + rpmArch,
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/',
'createrepo ' + destination
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]);
}
@ -213,13 +212,13 @@ function prepareFlatpak(arch) {
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('share/appdata/' + flatpakManifest.appId + '.appdata.xml')));
all.push(gulp.src(binaryDir + '/**/*', { base:binaryDir })
all.push(gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) {
p.dirname = 'share/' + product.applicationName + '/' + p.dirname;
})));
return es.merge(all).pipe(vfs.dest(destination));
}
};
}
function buildFlatpak(arch) {
@ -250,7 +249,7 @@ function buildFlatpak(arch) {
}
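// Hand the prepared manifest and build options to the flatpak-bundler npm module, which produces the Flatpak bundle.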
return function (cb) {
require('flatpak-bundler').bundle(manifest, buildOptions, cb);
}
};
}
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
@ -260,16 +259,16 @@ gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb', 'vscode-linux-ia32-min'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb', 'vscode-linux-x64-min'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb', 'vscode-linux-arm-min'], prepareDebPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm', 'vscode-linux-ia32-min'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm', 'vscode-linux-x64-min'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm', 'vscode-linux-arm-min'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
@ -278,9 +277,9 @@ gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak', 'vscode-linux-ia32-min'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak', 'vscode-linux-x64-min'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak', 'vscode-linux-arm-min'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-flatpak', ['vscode-linux-ia32-prepare-flatpak'], buildFlatpak('ia32'));
gulp.task('vscode-linux-x64-flatpak', ['vscode-linux-x64-prepare-flatpak'], buildFlatpak('x64'));


@ -9,12 +9,16 @@ const gulp = require('gulp');
const path = require('path');
const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
const pkg = require('../package.json');
const product = require('../product.json');
const repoPath = path.dirname(__dirname);
const buildPath = path.join(path.dirname(repoPath), 'VSCode-win32');
const buildPath = arch => path.join(path.dirname(repoPath), `VSCode-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
@ -24,9 +28,9 @@ function packageInnoSetup(iss, options, cb) {
const definitions = options.definitions || {};
const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${ key }' in Inno Setup package step`));
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
const defs = keys.map(key => `/d${ key }=${ definitions[key] }`);
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [iss].concat(defs);
cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
@ -34,25 +38,55 @@ function packageInnoSetup(iss, options, cb) {
.on('exit', () => cb(null));
}
function buildWin32Setup(cb) {
const definitions = {
NameLong: product.nameLong,
NameShort: product.nameShort,
DirName: product.win32DirName,
Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName,
AppMutex: product.win32MutexName,
AppId: product.win32AppId,
AppUserId: product.win32AppUserModelId,
SourceDir: buildPath,
RepoDir: repoPath
};
function buildWin32Setup(arch) {
return cb => {
const ia32AppId = product.win32AppId;
const x64AppId = product.win32x64AppId;
packageInnoSetup(issPath, { definitions }, cb);
const definitions = {
NameLong: product.nameLong,
NameShort: product.nameShort,
DirName: product.win32DirName,
Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName,
ShellNameShort: product.win32ShellNameShort,
AppMutex: product.win32MutexName,
Arch: arch,
AppId: arch === 'ia32' ? ia32AppId : x64AppId,
IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
AppUserId: product.win32AppUserModelId,
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
SourceDir: buildPath(arch),
RepoDir: repoPath,
OutputDir: setupDir(arch)
};
packageInnoSetup(issPath, { definitions }, cb);
};
}
gulp.task('clean-vscode-win32-setup', util.rimraf('.build/win32/setup'));
gulp.task('vscode-win32-setup', ['clean-vscode-win32-setup'], buildWin32Setup);
gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
function archiveWin32Setup(arch) {
return cb => {
const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
.on('error', cb)
.on('exit', () => cb(null));
};
}
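// The archive tasks below zip the staged build with the bundled 7-Zip binary; roughly equivalent to running
//   7z a -tzip <zipPath(arch)> . -r
// from inside the VSCode-win32-<arch> folder produced by the packaging step.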
gulp.task('clean-vscode-win32-ia32-archive', util.rimraf(zipDir('ia32')));
gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], archiveWin32Setup('ia32'));
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));


@ -1,451 +1,456 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var fs = require("fs");
var path = require("path");
var vm = require("vm");
/**
* Bundle `entryPoints` given config `config`.
*/
function bundle(entryPoints, config, callback) {
var entryPointsMap = {};
entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module;
});
var allMentionedModulesMap = {};
entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude || []).forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
var loader = loaderModule.exports;
config.isBuild = true;
loader.config(config);
loader(['require'], function (localRequire) {
var resolvePath = function (path) {
var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (var moduleId in entryPointsMap) {
var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
if (entryPoint.prepend) {
entryPoint.prepend = entryPoint.prepend.map(resolvePath);
}
}
});
loader(Object.keys(allMentionedModulesMap), function () {
var modules = loader.getBuildInfo();
var partialResult = emitEntryPoints(modules, entryPointsMap);
var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, function (err) { return callback(err, null); });
}
exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) {
var modulesMap = {};
modules.forEach(function (m) {
modulesMap[m.id] = m;
});
var modulesGraph = {};
modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies;
});
var sortedModules = topologicalSort(modulesGraph);
var result = [];
var usedPlugins = {};
var bundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach(function (moduleToBundle) {
var info = entryPoints[moduleToBundle];
var rootNodes = [moduleToBundle].concat(info.include || []);
var allDependencies = visit(rootNodes, modulesGraph);
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach(function (excludeRoot) {
var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude];
});
});
var includedModules = sortedModules.filter(function (module) {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files);
for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
var write = function (filename, contents) {
result.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.finishBuild(write);
}
});
return {
files: extractStrings(removeDuplicateTSBoilerplate(result)),
bundleData: bundleData
};
}
function extractStrings(destFiles) {
var parseDefineCall = function (moduleMatch, depsMatch) {
var module = moduleMatch.replace(/^"|"$/g, '');
var deps = depsMatch.split(',');
deps = deps.map(function (dep) {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
var prefix = null;
var _path = null;
var pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
}
else {
prefix = '';
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
});
return {
module: module,
deps: deps
};
};
destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) {
return;
}
if (/\.nls\.js$/.test(destFile.dest)) {
return;
}
// Do one pass to record the usage counts for each module id
var useCounts = {};
destFile.sources.forEach(function (source) {
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a];
});
var replacementMap = {};
sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index;
});
destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
var defineCall = parseDefineCall(moduleMatch, depsMatch);
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
"var __M = function(deps) {",
" var result = [];",
" for (var i = 0, len = deps.length; i < len; i++) {",
" result[i] = __m[deps[i]];",
" }",
" return result;",
"};"
].join('\n')
});
destFile.sources.push({
path: null,
contents: '}).call(this);'
});
});
return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles
var BOILERPLATE = [
{ start: /^var __extends/, end: /^};$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
destFile.sources.forEach(function (source) {
var lines = source.contents.split(/\r\n|\n|\r/);
var newLines = [];
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false;
}
}
else {
for (var j = 0; j < BOILERPLATE.length; j++) {
var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
END_BOILERPLATE = boilerplate.end;
}
else {
SEEN_BOILERPLATE[j] = true;
}
}
}
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
}
else {
newLines.push(line);
}
}
}
source.contents = newLines.join('\n');
});
});
return destFiles;
}
function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, append, dest) {
if (!dest) {
dest = entryPoint + '.js';
}
var mainResult = {
sources: [],
dest: dest
}, results = [mainResult];
var usedPlugins = {};
var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach(function (c) {
var bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
var pluginName = c.substr(0, bangIndex);
var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
var module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
var contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
var req = (function () {
throw new Error('no-no!');
});
req.toUrl = function (something) { return something; };
var write = function (filename, contents) {
results.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
var toIFile = function (path) {
var contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
var toPrepend = (prepend || []).map(toIFile);
var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
usedPlugins: usedPlugins
};
}
function readFileAndRemoveBOM(path) {
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
}
return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
var result = '';
if (typeof plugin.write === 'function') {
var write = (function (what) {
result += what;
});
write.getEntryPoint = function () {
return entryPoint;
};
write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
plugin.write(pluginName, moduleName, write);
}
return {
path: null,
contents: result
};
}
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define()
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
var parensOffset = contents.indexOf('(', defineCallOffset);
var insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
};
}
/**
* Convert a position (line:col) to (offset) in string `str`
*/
function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) {
return desiredCol - 1;
}
var line = 1, lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
}
lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
line++;
} while (lastNewLineOffset >= 0);
return -1;
}
/**
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes, graph) {
var result = {}, queue = rootNodes;
rootNodes.forEach(function (node) {
result[node] = true;
});
while (queue.length > 0) {
var el = queue.shift();
var myEdges = graph[el] || [];
myEdges.forEach(function (toNode) {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
}
});
}
return result;
}
/**
* Perform a topological sort on `graph`
*/
function topologicalSort(graph) {
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
inverseEdges[toNode].push(fromNode);
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
var S = [], L = [];
Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
}
});
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
var n = S.shift();
L.push(n);
var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];
S.push(m);
}
});
}
if (Object.keys(outgoingEdgeCount).length > 0) {
throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
}
return L;
}
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var fs = require("fs");
var path = require("path");
var vm = require("vm");
/**
* Bundle `entryPoints` given config `config`.
*/
function bundle(entryPoints, config, callback) {
var entryPointsMap = {};
entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module;
});
var allMentionedModulesMap = {};
entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude || []).forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
var loader = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], function (localRequire) {
var resolvePath = function (path) {
var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (var moduleId in entryPointsMap) {
var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
if (entryPoint.prepend) {
entryPoint.prepend = entryPoint.prepend.map(resolvePath);
}
}
});
loader(Object.keys(allMentionedModulesMap), function () {
var modules = loader.getBuildInfo();
var partialResult = emitEntryPoints(modules, entryPointsMap);
var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, function (err) { return callback(err, null); });
}
exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) {
var modulesMap = {};
modules.forEach(function (m) {
modulesMap[m.id] = m;
});
var modulesGraph = {};
modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies;
});
var sortedModules = topologicalSort(modulesGraph);
var result = [];
var usedPlugins = {};
var bundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach(function (moduleToBundle) {
var info = entryPoints[moduleToBundle];
var rootNodes = [moduleToBundle].concat(info.include || []);
var allDependencies = visit(rootNodes, modulesGraph);
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach(function (excludeRoot) {
var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude];
});
});
var includedModules = sortedModules.filter(function (module) {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files);
for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
var write = function (filename, contents) {
result.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.finishBuild(write);
}
});
return {
// TODO@TS 2.1.2
files: extractStrings(removeDuplicateTSBoilerplate(result)),
bundleData: bundleData
};
}
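// extractStrings shrinks the concatenated bundles: it counts how often each module id appears in the
// define(...) calls of a file, emits a shared string table (__m) sorted by use count, and rewrites each
// define call to reference table indexes via __M([...]) instead of repeating the full module ids.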
function extractStrings(destFiles) {
var parseDefineCall = function (moduleMatch, depsMatch) {
var module = moduleMatch.replace(/^"|"$/g, '');
var deps = depsMatch.split(',');
deps = deps.map(function (dep) {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
var prefix = null;
var _path = null;
var pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
}
else {
prefix = '';
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
});
return {
module: module,
deps: deps
};
};
destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) {
return;
}
if (/\.nls\.js$/.test(destFile.dest)) {
return;
}
// Do one pass to record the usage counts for each module id
var useCounts = {};
destFile.sources.forEach(function (source) {
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a];
});
var replacementMap = {};
sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index;
});
destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
var defineCall = parseDefineCall(moduleMatch, depsMatch);
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
"var __M = function(deps) {",
" var result = [];",
" for (var i = 0, len = deps.length; i < len; i++) {",
" result[i] = __m[deps[i]];",
" }",
" return result;",
"};"
].join('\n')
});
destFile.sources.push({
path: null,
contents: '}).call(this);'
});
});
return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles
var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
destFile.sources.forEach(function (source) {
var lines = source.contents.split(/\r\n|\n|\r/);
var newLines = [];
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false;
}
}
else {
for (var j = 0; j < BOILERPLATE.length; j++) {
var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
END_BOILERPLATE = boilerplate.end;
}
else {
SEEN_BOILERPLATE[j] = true;
}
}
}
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
}
else {
newLines.push(line);
}
}
}
source.contents = newLines.join('\n');
});
});
return destFiles;
}
function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, append, dest) {
if (!dest) {
dest = entryPoint + '.js';
}
var mainResult = {
sources: [],
dest: dest
}, results = [mainResult];
var usedPlugins = {};
var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach(function (c) {
var bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
var pluginName = c.substr(0, bangIndex);
var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
var module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
var contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
var req = (function () {
throw new Error('no-no!');
});
req.toUrl = function (something) { return something; };
var write = function (filename, contents) {
results.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
var toIFile = function (path) {
var contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
var toPrepend = (prepend || []).map(toIFile);
var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
usedPlugins: usedPlugins
};
}
function readFileAndRemoveBOM(path) {
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
}
return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
var result = '';
if (typeof plugin.write === 'function') {
var write = (function (what) {
result += what;
});
write.getEntryPoint = function () {
return entryPoint;
};
write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
plugin.write(pluginName, moduleName, write);
}
return {
path: null,
contents: result
};
}
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define()
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
var parensOffset = contents.indexOf('(', defineCallOffset);
var insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
};
}
/**
* Convert a position (line:col) to (offset) in string `str`
*/
function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) {
return desiredCol - 1;
}
var line = 1, lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
}
lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
line++;
} while (lastNewLineOffset >= 0);
return -1;
}
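// Illustrative example: positionToOffset('ab\ncd', 2, 1) === 3, the offset of 'c'. Lines and columns
// are 1-based while the returned offset is 0-based.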
/**
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes, graph) {
var result = {}, queue = rootNodes;
rootNodes.forEach(function (node) {
result[node] = true;
});
while (queue.length > 0) {
var el = queue.shift();
var myEdges = graph[el] || [];
myEdges.forEach(function (toNode) {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
}
});
}
return result;
}
/**
* Perform a topological sort on `graph`
*/
function topologicalSort(graph) {
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
inverseEdges[toNode].push(fromNode);
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
var S = [], L = [];
Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
}
});
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
var n = S.shift();
L.push(n);
var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];
S.push(m);
}
});
}
if (Object.keys(outgoingEdgeCount).length > 0) {
throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
}
return L;
}
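// Illustrative example: topologicalSort({ main: ['util'], util: [] }) returns ['util', 'main']; nodes
// with no remaining outgoing edges are emitted first, and S.sort() keeps the order deterministic.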


@ -22,24 +22,24 @@ interface IBuildModuleInfo {
}
interface IBuildModuleInfoMap {
[moduleId:string]: IBuildModuleInfo;
[moduleId: string]: IBuildModuleInfo;
}
interface ILoaderPlugin {
write(pluginName:string, moduleName:string, write:ILoaderPluginWriteFunc): void;
writeFile(pluginName:string, entryPoint:string, req:ILoaderPluginReqFunc, write:(filename:string, contents:string)=>void, config:any): void;
finishBuild(write:(filename:string, contents:string)=>void): void;
write(pluginName: string, moduleName: string, write: ILoaderPluginWriteFunc): void;
writeFile(pluginName: string, entryPoint: string, req: ILoaderPluginReqFunc, write: (filename: string, contents: string) => void, config: any): void;
finishBuild(write: (filename: string, contents: string) => void): void;
}
interface ILoaderPluginWriteFunc {
(something:string): void;
(something: string): void;
getEntryPoint(): string;
asModule(moduleId:string, code:string): void;
asModule(moduleId: string, code: string): void;
}
interface ILoaderPluginReqFunc {
(something:string): void;
toUrl(something:string): string;
(something: string): void;
toUrl(something: string): string;
}
export interface IEntryPoint {
@ -52,15 +52,15 @@ export interface IEntryPoint {
}
interface IEntryPointMap {
[moduleId:string]: IEntryPoint;
[moduleId: string]: IEntryPoint;
}
export interface IGraph {
[node:string]: string[];
[node: string]: string[];
}
interface INodeSet {
[node:string]: boolean;
[node: string]: boolean;
}
export interface IFile {
@ -75,7 +75,7 @@ export interface IConcatFile {
export interface IBundleData {
graph: IGraph;
bundles: {[moduleId:string]:string[];};
bundles: { [moduleId: string]: string[]; };
}
export interface IBundleResult {
@ -91,40 +91,44 @@ interface IPartialBundleResult {
export interface ILoaderConfig {
isBuild?: boolean;
paths?: { [path: string]: any; };
}
/**
* Bundle `entryPoints` given config `config`.
*/
export function bundle(entryPoints:IEntryPoint[], config:ILoaderConfig, callback:(err:any, result:IBundleResult) => void): void {
let entryPointsMap:IEntryPointMap = {};
entryPoints.forEach((module:IEntryPoint) => {
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module;
});
let allMentionedModulesMap: {[modules:string]:boolean;} = {};
entryPoints.forEach((module:IEntryPoint) => {
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true;
(module.include||[]).forEach(function(includedModule) {
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude||[]).forEach(function(excludedModule) {
(module.exclude || []).forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r: Function = <any> vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
var loader:any = loaderModule.exports;
var loader: any = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], (localRequire) => {
let resolvePath = (path:string) => {
let resolvePath = (path: string) => {
let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
@ -154,14 +158,14 @@ export function bundle(entryPoints:IEntryPoint[], config:ILoaderConfig, callback
}, (err) => callback(err, null));
}
function emitEntryPoints(modules:IBuildModuleInfo[], entryPoints:IEntryPointMap): IPartialBundleResult {
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m:IBuildModuleInfo) => {
modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m;
});
let modulesGraph:IGraph = {};
modules.forEach((m:IBuildModuleInfo) => {
let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies;
});
@ -169,25 +173,25 @@ function emitEntryPoints(modules:IBuildModuleInfo[], entryPoints:IEntryPointMap)
let result: IConcatFile[] = [];
let usedPlugins: IPluginMap = {};
let bundleData:IBundleData = {
let bundleData: IBundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach((moduleToBundle:string) => {
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
let info = entryPoints[moduleToBundle];
let rootNodes = [moduleToBundle].concat(info.include || []);
let allDependencies = visit(rootNodes, modulesGraph);
let excludes:string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot:string) => {
excludes.forEach((excludeRoot: string) => {
let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude:string) => {
Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude];
});
});
let includedModules = sortedModules.filter((module:string) => {
let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module];
});
@ -209,10 +213,10 @@ function emitEntryPoints(modules:IBuildModuleInfo[], entryPoints:IEntryPointMap)
}
});
Object.keys(usedPlugins).forEach((pluginName:string) => {
Object.keys(usedPlugins).forEach((pluginName: string) => {
let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
let write = (filename:string, contents:string) => {
let write = (filename: string, contents: string) => {
result.push({
dest: filename,
sources: [{
@ -226,21 +230,22 @@ function emitEntryPoints(modules:IBuildModuleInfo[], entryPoints:IEntryPointMap)
});
return {
// TODO@TS 2.1.2
files: extractStrings(removeDuplicateTSBoilerplate(result)),
bundleData: bundleData
};
}
function extractStrings(destFiles:IConcatFile[]):IConcatFile[] {
let parseDefineCall = (moduleMatch:string, depsMatch:string) => {
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix:string = null;
let _path:string = null;
let prefix: string = null;
let _path: string = null;
let pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
@ -271,7 +276,7 @@ function extractStrings(destFiles:IConcatFile[]):IConcatFile[] {
}
// Do one pass to record the usage counts for each module id
let useCounts: {[moduleId:string]:number;} = {};
let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => {
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
@ -290,7 +295,7 @@ function extractStrings(destFiles:IConcatFile[]):IConcatFile[] {
return useCounts[b] - useCounts[a];
});
let replacementMap: {[moduleId:string]:number;} = {};
let replacementMap: { [moduleId: string]: number; } = {};
sortedByUseModules.forEach((module, index) => {
replacementMap[module] = index;
});
@ -325,10 +330,10 @@ function extractStrings(destFiles:IConcatFile[]):IConcatFile[] {
return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles:IConcatFile[]):IConcatFile[] {
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
// Taken from typescript compiler => emitFiles
let BOILERPLATE = [
{ start: /^var __extends/, end: /^};$/ },
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
{ start: /^var __metadata/, end: /^};$/ },
@ -377,7 +382,7 @@ function removeDuplicateTSBoilerplate(destFiles:IConcatFile[]):IConcatFile[] {
}
interface IPluginMap {
[moduleId:string]:ILoaderPlugin;
[moduleId: string]: ILoaderPlugin;
}
interface IEmitEntryPointResult {
@ -386,10 +391,10 @@ interface IEmitEntryPointResult {
}
function emitEntryPoint(
modulesMap:IBuildModuleInfoMap,
deps:IGraph,
entryPoint:string,
includedModules:string[],
modulesMap: IBuildModuleInfoMap,
deps: IGraph,
entryPoint: string,
includedModules: string[],
prepend: string[],
append: string[],
dest: string
@ -398,20 +403,20 @@ function emitEntryPoint(
dest = entryPoint + '.js';
}
let mainResult: IConcatFile = {
sources: [],
dest: dest
},
sources: [],
dest: dest
},
results: IConcatFile[] = [mainResult];
let usedPlugins: IPluginMap = {};
let getLoaderPlugin = (pluginName:string):ILoaderPlugin => {
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach((c:string) => {
includedModules.forEach((c: string) => {
let bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
@ -436,15 +441,15 @@ function emitEntryPoint(
}
});
Object.keys(usedPlugins).forEach((pluginName:string) => {
Object.keys(usedPlugins).forEach((pluginName: string) => {
let plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
let req:ILoaderPluginReqFunc = <any>(() => {
let req: ILoaderPluginReqFunc = <any>(() => {
throw new Error('no-no!');
});
req.toUrl = something => something;
let write = (filename:string, contents:string) => {
let write = (filename: string, contents: string) => {
results.push({
dest: filename,
sources: [{
@ -457,7 +462,7 @@ function emitEntryPoint(
}
});
let toIFile = (path):IFile => {
let toIFile = (path): IFile => {
let contents = readFileAndRemoveBOM(path);
return {
path: path,
@ -465,8 +470,8 @@ function emitEntryPoint(
};
};
let toPrepend = (prepend||[]).map(toIFile);
let toAppend = (append||[]).map(toIFile);
let toPrepend = (prepend || []).map(toIFile);
let toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
@ -476,7 +481,7 @@ function emitEntryPoint(
};
}
function readFileAndRemoveBOM(path:string): string {
function readFileAndRemoveBOM(path: string): string {
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
@ -486,7 +491,7 @@ function readFileAndRemoveBOM(path:string): string {
return contents;
}
function emitPlugin(entryPoint:string, plugin:ILoaderPlugin, pluginName:string, moduleName:string): IFile {
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
let result = '';
if (typeof plugin.write === 'function') {
let write: ILoaderPluginWriteFunc = <any>((what) => {
@ -495,8 +500,8 @@ function emitPlugin(entryPoint:string, plugin:ILoaderPlugin, pluginName:string,
write.getEntryPoint = () => {
return entryPoint;
};
write.asModule = (moduleId:string, code:string) => {
code = code.replace(/^define\(/, 'define("'+moduleId+'",');
write.asModule = (moduleId: string, code: string) => {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
plugin.write(pluginName, moduleName, write);
@ -507,7 +512,7 @@ function emitPlugin(entryPoint:string, plugin:ILoaderPlugin, pluginName:string,
};
}
function emitNamedModule(moduleId:string, myDeps:string[], defineCallPosition:IPosition, path:string, contents:string): IFile {
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
// `defineCallPosition` is the position in code: |define()
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
@ -523,7 +528,7 @@ function emitNamedModule(moduleId:string, myDeps:string[], defineCallPosition:IP
};
}
function emitShimmedModule(moduleId:string, myDeps:string[], factory:string, path:string, contents:string): IFile {
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
@ -535,7 +540,7 @@ function emitShimmedModule(moduleId:string, myDeps:string[], factory:string, pat
/**
* Convert a position (line:col) to (offset) in string `str`
*/
function positionToOffset(str:string, desiredLine:number, desiredCol:number): number {
function positionToOffset(str: string, desiredLine: number, desiredCol: number): number {
if (desiredLine === 1) {
return desiredCol - 1;
}
@ -558,8 +563,8 @@ function positionToOffset(str:string, desiredLine:number, desiredCol:number): nu
/**
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes:string[], graph:IGraph):INodeSet {
let result:INodeSet = {},
function visit(rootNodes: string[], graph: IGraph): INodeSet {
let result: INodeSet = {},
queue = rootNodes;
rootNodes.forEach((node) => {
@ -583,13 +588,13 @@ function visit(rootNodes:string[], graph:IGraph):INodeSet {
/**
* Perform a topological sort on `graph`
*/
function topologicalSort(graph:IGraph): string[] {
function topologicalSort(graph: IGraph): string[] {
let allNodes:INodeSet = {},
outgoingEdgeCount:{[node:string]:number;} = {},
inverseEdges:IGraph = {};
let allNodes: INodeSet = {},
outgoingEdgeCount: { [node: string]: number; } = {},
inverseEdges: IGraph = {};
Object.keys(graph).forEach((fromNode:string) => {
Object.keys(graph).forEach((fromNode: string) => {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
@ -606,7 +611,7 @@ function topologicalSort(graph:IGraph): string[] {
let S: string[] = [],
L: string[] = [];
Object.keys(allNodes).forEach((node:string) => {
Object.keys(allNodes).forEach((node: string) => {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
@ -617,11 +622,11 @@ function topologicalSort(graph:IGraph): string[] {
// Ensure the exact same order all the time with the same inputs
S.sort();
let n:string = S.shift();
let n: string = S.shift();
L.push(n);
let myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m:string) => {
myInverseEdges.forEach((m: string) => {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];

View file

@ -1,134 +1,170 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var gulp = require("gulp");
var tsb = require("gulp-tsb");
var es = require("event-stream");
var watch = require('./watch');
var nls = require("./nls");
var util = require("./util");
var reporter_1 = require("./reporter");
var path = require("path");
var bom = require("gulp-bom");
var sourcemaps = require("gulp-sourcemaps");
var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) {
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
var input = es.through();
var output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
function compileTask(out, build) {
var compile = createCompile(build, true);
return function () {
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
return src
.pipe(compile())
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
var compile = createCompile(build);
return function () {
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
function monacodtsTask(out, isWatch) {
var timer = null;
var runSoon = function (howSoon) {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
timer = setTimeout(function () {
timer = null;
runNow();
}, howSoon);
};
var runNow = function () {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
// if (reporter.hasErrors()) {
// monacodts.complainErrors();
// return;
// }
var result = monacodts.run(out);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
var resultStream;
if (isWatch) {
var filesToWatchMap_1 = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filesToWatchMap_1[path.normalize(filePath)] = true;
});
watch('build/monaco/*').pipe(es.through(function () {
runSoon(5000);
}));
resultStream = es.through(function (data) {
var filePath = path.normalize(data.path);
if (filesToWatchMap_1[filePath]) {
runSoon(5000);
}
this.emit('data', data);
});
}
else {
resultStream = es.through(null, function () {
runNow();
this.emit('end');
});
}
return resultStream;
}
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var gulp = require("gulp");
var tsb = require("gulp-tsb");
var es = require("event-stream");
var watch = require('./watch');
var nls = require("./nls");
var util = require("./util");
var reporter_1 = require("./reporter");
var path = require("path");
var bom = require("gulp-bom");
var sourcemaps = require("gulp-sourcemaps");
var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) {
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
var input = es.through();
var output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
function compileTask(out, build) {
return function () {
var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
return src
.pipe(compile())
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
function reloadTypeScriptNodeModule() {
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
}
function heapUsed() {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out, isWatch) {
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
var inputFiles = {};
for (var filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
var setInputFile = function (filePath, contents) {
if (inputFiles[filePath] === contents) {
// no change
return;
}
inputFiles[filePath] = contents;
var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
};
var run = function () {
var result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}
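For context, reloadTypeScriptNodeModule above relies on a general Node.js pattern rather than anything TypeScript-specific: re-require a module after evicting it from require.cache, then copy the fresh exports onto the original cached module object so existing references keep working. A minimal standalone sketch of that pattern (the helper name is ours, not from the commit):

function reloadNodeModule(name) {
    // Resolve the module id and remember the cached module object.
    var resolvedName = require.resolve(name);
    var originalModule = require.cache[resolvedName];
    // Evict it so the next require() re-evaluates the module from disk.
    delete require.cache[resolvedName];
    var newExports = require(name);
    // Restore the original cache entry and copy the fresh exports onto it,
    // so code that already holds originalModule.exports keeps working.
    require.cache[resolvedName] = originalModule;
    for (var prop in newExports) {
        if (newExports.hasOwnProperty(prop)) {
            originalModule.exports[prop] = newExports[prop];
        }
    }
}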

View file

@ -36,6 +36,7 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
return function (token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
@ -48,6 +49,7 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
@ -64,12 +66,13 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
}
export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
const compile = createCompile(build, true);
return function () {
const compile = createCompile(build, true);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts')
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
return src
@ -80,12 +83,13 @@ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWrite
}
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
const compile = createCompile(build);
return function () {
const compile = createCompile(build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts')
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
const watchSrc = watch('src/**', { base: 'src' });
@ -96,30 +100,84 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
};
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
let timer: NodeJS.Timer = null;
function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
}
const runSoon = function (howSoon: number) {
if (timer !== null) {
clearTimeout(timer);
timer = null;
function heapUsed(): string {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
const inputFiles: { [file: string]: string; } = {};
for (let filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
const setInputFile = (filePath: string, contents: string) => {
if (inputFiles[filePath] === contents) {
// no change
return;
}
inputFiles[filePath] = contents;
const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
timer = setTimeout(function () {
timer = null;
runNow();
}, howSoon);
};
const runNow = function () {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
// if (reporter.hasErrors()) {
// monacodts.complainErrors();
// return;
// }
const result = monacodts.run(out);
const run = () => {
const result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
@ -132,32 +190,18 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
let resultStream: NodeJS.ReadWriteStream;
if (isWatch) {
const filesToWatchMap: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filesToWatchMap[path.normalize(filePath)] = true;
});
watch('build/monaco/*').pipe(es.through(function () {
runSoon(5000);
run();
}));
resultStream = es.through(function (data) {
const filePath = path.normalize(data.path);
if (filesToWatchMap[filePath]) {
runSoon(5000);
}
this.emit('data', data);
});
} else {
resultStream = es.through(null, function () {
runNow();
this.emit('end');
});
}
resultStream = es.through(function (data) {
const filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}

View file

@ -1,95 +1,118 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var event_stream_1 = require("event-stream");
var assign = require("object-assign");
var remote = require("gulp-remote-src");
var flatmap = require('gulp-flatmap');
var vzip = require('gulp-vinyl-zip');
var filter = require('gulp-filter');
var rename = require('gulp-rename');
var util = require('gulp-util');
var buffer = require('gulp-buffer');
var json = require('gulp-json-editor');
function error(err) {
var result = event_stream_1.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
function src(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extension);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.src = src;
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var assign = require("object-assign");
var remote = require("gulp-remote-src");
var flatmap = require('gulp-flatmap');
var vzip = require('gulp-vinyl-zip');
var filter = require('gulp-filter');
var rename = require('gulp-rename');
var util = require('gulp-util');
var buffer = require('gulp-buffer');
var json = require('gulp-json-editor');
var fs = require("fs");
var path = require("path");
var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath })
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })
.map(function (filePath) { return new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}); });
es.readArray(files).pipe(result);
})
.catch(function (err) { return result.emit('error', err); });
return result;
}
exports.fromLocal = fromLocal;
function error(err) {
var result = es.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
function fromMarketplace(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extension);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.fromMarketplace = fromMarketplace;
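As a rough usage sketch only (the task name, extension folder, and marketplace id/version below are illustrative, not taken from this commit), both exports are meant to be consumed as vinyl streams inside a gulp task:

var gulp = require('gulp');
var es = require('event-stream');
var ext = require('./build/lib/extensions');

// Hypothetical task: package one built-in extension from source and
// download one extension from the Marketplace, then write both out.
gulp.task('mix-extensions', function () {
    return es.merge(
        ext.fromLocal('extensions/markdown'),
        ext.fromMarketplace('ms-vscode.node-debug2', '1.16.0')
    ).pipe(gulp.dest('.build/extensions'));
});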

View file

@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { through } from 'event-stream';
import * as es from 'event-stream';
import { Stream } from 'stream';
import assign = require('object-assign');
import remote = require('gulp-remote-src');
@ -14,9 +14,34 @@ const rename = require('gulp-rename');
const util = require('gulp-util');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor');
import * as fs from 'fs';
import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';
export function fromLocal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result;
}
function error(err: any): Stream {
const result = through();
const result = es.through();
setTimeout(() => result.emit('error', err));
return result;
}
@ -26,7 +51,7 @@ const baseHeaders = {
'User-Agent': 'VSCode Build',
};
export function src(extensionName: string, version: string): Stream {
export function fromMarketplace(extensionName: string, version: string): Stream {
const filterType = 7;
const value = extensionName;
const criterium = { filterType, value };
@ -61,7 +86,7 @@ export function src(extensionName: string, version: string): Stream {
const result = JSON.parse(rawResult);
const extension = result.results[0].extensions[0];
if (!extension) {
return error(`No such extension: ${ extension }`);
return error(`No such extension: ${extension}`);
}
const metadata = {
@ -72,15 +97,15 @@ export function src(extensionName: string, version: string): Stream {
const extensionVersion = extension.versions.filter(v => v.version === version)[0];
if (!extensionVersion) {
return error(`No such extension version: ${ extensionName } @ ${ version }`);
return error(`No such extension version: ${extensionName} @ ${version}`);
}
const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
if (!asset) {
return error(`No VSIX found for extension version: ${ extensionName } @ ${ version }`);
return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
}
util.log('Downloading extension:', util.colors.yellow(`${ extensionName }@${ version }`), '...');
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: asset.source,

View file

@ -1,51 +1,53 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var path = require("path");
var fs = require("fs");
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
function getVersion(repo) {
var git = path.join(repo, '.git');
var headPath = path.join(git, 'HEAD');
var head;
try {
head = fs.readFileSync(headPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
return head;
}
var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return void 0;
}
var ref = refMatch[1];
var refPath = path.join(git, ref);
try {
return fs.readFileSync(refPath, 'utf8').trim();
}
catch (e) {
}
var packedRefsPath = path.join(git, 'packed-refs');
var refsRaw;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
var refsMatch;
var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}
return refs[ref];
}
exports.getVersion = getVersion;
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var fs = require("fs");
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
function getVersion(repo) {
var git = path.join(repo, '.git');
var headPath = path.join(git, 'HEAD');
var head;
try {
head = fs.readFileSync(headPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
return head;
}
var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return void 0;
}
var ref = refMatch[1];
var refPath = path.join(git, ref);
try {
return fs.readFileSync(refPath, 'utf8').trim();
}
catch (e) {
// noop
}
var packedRefsPath = path.join(git, 'packed-refs');
var refsRaw;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
var refsMatch;
var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}
return refs[ref];
}
exports.getVersion = getVersion;
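A small usage sketch, with an illustrative repository path: getVersion resolves HEAD through .git/HEAD, loose refs, or packed-refs and returns the 40-character sha, or undefined on failure:

var path = require('path');
var git = require('./build/lib/git');

// Hypothetical call against a checkout root; prints a sha1 string or undefined.
var commit = git.getVersion(path.join(__dirname, '..'));
console.log(commit);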

View file

@ -2,7 +2,7 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
'use strict';
import * as path from 'path';
import * as fs from 'fs';
@ -10,7 +10,7 @@ import * as fs from 'fs';
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
export function getVersion(repo:string): string {
export function getVersion(repo: string): string {
const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD');
let head: string;
@ -41,7 +41,7 @@ export function getVersion(repo:string): string {
}
const packedRefsPath = path.join(git, 'packed-refs');
let refsRaw:string;
let refsRaw: string;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
@ -51,7 +51,7 @@ export function getVersion(repo:string): string {
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray;
let refs:{[ref:string]:string} = {};
let refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];

	File diff suppressed because it is too large

View file

@ -0,0 +1,198 @@
{
"editor": [
{
"name": "vs/platform",
"project": "vscode-editor"
},
{
"name": "vs/editor/contrib",
"project": "vscode-editor"
},
{
"name": "vs/editor",
"project": "vscode-editor"
},
{
"name": "vs/base",
"project": "vscode-editor"
}
],
"workbench": [
{
"name": "vs/code",
"project": "vscode-workbench"
},
{
"name": "vs/workbench",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/cli",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/debug",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/emmet",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/execution",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/explorers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/feedback",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/html",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/markers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/nps",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/output",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/performance",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/preferences",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/relauncher",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/scm",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/search",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/snippets",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/surveys",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/tasks",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/terminal",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/trust",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/views",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configuration",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/editor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/message",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/progress",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/textfile",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/textMate",
"project": "vscode-workbench"
},
{
"name": "setup_messages",
"project": "vscode-workbench"
}
]
}

View file

@ -6,12 +6,16 @@
import * as path from 'path';
import * as fs from 'fs';
import { through } from 'event-stream';
import { through, readable } from 'event-stream';
import { ThroughStream } from 'through';
import File = require('vinyl');
import * as Is from 'is';
import * as xml2js from 'xml2js';
import * as glob from 'glob';
import * as https from 'https';
var util = require('gulp-util');
var iconv = require('iconv-lite');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.green('[i18n]'), message, ...rest);
@ -21,6 +25,23 @@ interface Map<V> {
[key: string]: V;
}
interface Item {
id: string;
message: string;
comment: string;
}
export interface Resource {
name: string;
project: string;
}
interface ParsedXLF {
messages: Map<string>;
originalFilePath: string;
language: string;
}
interface LocalizeInfo {
key: string;
comment: string[];
@ -52,17 +73,204 @@ module BundledFormat {
}
}
const vscodeLanguages: string[] = [
'chs',
'cht',
'jpn',
'kor',
'deu',
'fra',
'esn',
'rus',
'ita'
];
interface ValueFormat {
message: string;
comment: string[];
}
interface PackageJsonFormat {
[key: string]: string | ValueFormat;
}
module PackageJsonFormat {
export function is(value: any): value is PackageJsonFormat {
if (Is.undef(value) || !Is.object(value)) {
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
}
interface ModuleJsonFormat {
messages: string[];
keys: (string | LocalizeInfo)[];
}
module ModuleJsonFormat {
export function is(value: any): value is ModuleJsonFormat {
let candidate = value as ModuleJsonFormat;
return Is.defined(candidate)
&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
}
}
export class Line {
private buffer: string[] = [];
constructor(private indent: number = 0) {
if (indent > 0) {
this.buffer.push(new Array(indent + 1).join(' '));
}
}
public append(value: string): Line {
this.buffer.push(value);
return this;
}
public toString(): string {
return this.buffer.join('');
}
}
class TextModel {
private _lines: string[];
constructor(contents: string) {
this._lines = contents.split(/\r\n|\r|\n/);
}
public get lines(): string[] {
return this._lines;
}
}
export class XLF {
private buffer: string[];
private files: Map<Item[]>;
constructor(public project: string) {
this.buffer = [];
this.files = Object.create(null);
}
public toString(): string {
this.appendHeader();
for (let file in this.files) {
this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
for (let item of this.files[file]) {
this.addStringItem(item);
}
this.appendNewLine('</body></file>', 2);
}
this.appendFooter();
return this.buffer.join('\r\n');
}
public addFile(original: string, keys: any[], messages: string[]) {
this.files[original] = [];
let existingKeys = [];
for (let key of keys) {
// Ignore duplicate keys because Transifex does not populate those with translated values.
if (existingKeys.indexOf(key) !== -1) {
continue;
}
existingKeys.push(key);
let message: string = encodeEntities(messages[keys.indexOf(key)]);
let comment: string = undefined;
// Check if the message contains description (if so, it becomes an object type in JSON)
if (Is.string(key)) {
this.files[original].push({ id: key, message: message, comment: comment });
} else {
if (key['comment'] && key['comment'].length > 0) {
comment = key['comment'].map(comment => encodeEntities(comment)).join('\r\n');
}
this.files[original].push({ id: key['key'], message: message, comment: comment });
}
}
}
private addStringItem(item: Item): void {
if (!item.id || !item.message) {
throw new Error('No item ID or value specified.');
}
this.appendNewLine(`<trans-unit id="${item.id}">`, 4);
this.appendNewLine(`<source xml:lang="en">${item.message}</source>`, 6);
if (item.comment) {
this.appendNewLine(`<note>${item.comment}</note>`, 6);
}
this.appendNewLine('</trans-unit>', 4);
}
private appendHeader(): void {
this.appendNewLine('<?xml version="1.0" encoding="utf-8"?>', 0);
this.appendNewLine('<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">', 0);
}
private appendFooter(): void {
this.appendNewLine('</xliff>', 0);
}
private appendNewLine(content: string, indent?: number): void {
let line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
static parse = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(`Failed to parse XLIFF string. ${err}`);
}
const fileNodes: any[] = result['xliff']['file'];
if (!fileNodes) {
reject('XLIFF file does not contain "xliff" or "file" node(s) required for parsing.');
}
fileNodes.forEach((file) => {
const originalFilePath = file.$.original;
if (!originalFilePath) {
reject('XLIFF file node does not contain original attribute to determine the original location of the resource file.');
}
const language = file.$['target-language'].toLowerCase();
if (!language) {
reject('XLIFF file node does not contain target-language attribute to determine translated language.');
}
let messages: Map<string> = {};
const transUnits = file.body[0]['trans-unit'];
transUnits.forEach(unit => {
const key = unit.$.id;
if (!unit.target) {
return; // No translation available
}
const val = unit.target.toString();
if (key && val) {
messages[key] = decodeEntities(val);
} else {
reject('XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.');
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language });
});
resolve(files);
});
});
};
}
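// Illustrative only (not part of this file): a caller builds one XLF per Transifex
// project, adds a <file> entry per source bundle, and serializes it, e.g.
//   const xlf = new XLF('vscode-workbench');
//   xlf.addFile('src/vs/workbench/parts/search', keys, messages);
//   const xliffString = xlf.toString();
// Going the other way, XLF.parse(xliffString) resolves to one entry per <file>
// node, carrying its original path, target language, and translated messages.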
const iso639_3_to_2: Map<string> = {
'chs': 'zh-cn',
@ -85,6 +293,30 @@ const iso639_3_to_2: Map<string> = {
'trk': 'tr'
};
/**
* Used to map Transifex to VS Code language code representation.
*/
const iso639_2_to_3: Map<string> = {
'zh-hans': 'chs',
'zh-hant': 'cht',
'cs-cz': 'csy',
'de': 'deu',
'en': 'enu',
'es': 'esn',
'fr': 'fra',
'hu': 'hun',
'it': 'ita',
'ja': 'jpn',
'ko': 'kor',
'nl': 'nld',
'pl': 'plk',
'pt-br': 'ptb',
'pt': 'ptg',
'ru': 'rus',
'sv-se': 'sve',
'tr': 'trk'
};
interface IDirectoryInfo {
name: string;
iso639_2: string;
@ -128,7 +360,7 @@ function stripComments(content: string): string {
// A line comment. If it ends in \r?\n then keep it.
let length = m4.length;
if (length > 2 && m4[length - 1] === '\n') {
return m4[length - 2] === '\r' ? '\r\n': '\n';
return m4[length - 2] === '\r' ? '\r\n' : '\n';
} else {
return '';
}
@ -138,13 +370,13 @@ function stripComments(content: string): string {
}
});
return result;
};
}
function escapeCharacters(value:string):string {
var result:string[] = [];
function escapeCharacters(value: string): string {
var result: string[] = [];
for (var i = 0; i < value.length; i++) {
var ch = value.charAt(i);
switch(ch) {
switch (ch) {
case '\'':
result.push('\\\'');
break;
@ -176,7 +408,7 @@ function escapeCharacters(value:string):string {
return result.join('');
}
function processCoreBundleFormat(fileHeader:string, json: BundledFormat, emitter: any) {
function processCoreBundleFormat(fileHeader: string, languages: string[], json: BundledFormat, emitter: any) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
@ -206,8 +438,13 @@ function processCoreBundleFormat(fileHeader:string, json: BundledFormat, emitter
});
let languageDirectory = path.join(__dirname, '..', '..', 'i18n');
let languages = sortLanguages(fs.readdirSync(languageDirectory).filter((item) => fs.statSync(path.join(languageDirectory, item)).isDirectory()));
languages.forEach((language) => {
let languageDirs;
if (languages) {
languageDirs = sortLanguages(languages);
} else {
languageDirs = sortLanguages(fs.readdirSync(languageDirectory).filter((item) => fs.statSync(path.join(languageDirectory, item)).isDirectory()));
}
languageDirs.forEach((language) => {
if (!language.iso639_2) {
return;
}
@ -267,19 +504,20 @@ function processCoreBundleFormat(fileHeader:string, json: BundledFormat, emitter
return;
}
messages.forEach((message, index) => {
contents.push(`\t\t"${escapeCharacters(message)}${index < messages.length ? '",': '"'}`);
contents.push(`\t\t"${escapeCharacters(message)}${index < messages.length ? '",' : '"'}`);
});
contents.push(index < modules.length - 1 ? '\t],' : '\t]');
});
contents.push('});');
emitter.emit('data', new File( { path: bundle + '.nls.' + language.iso639_2 + '.js', contents: new Buffer(contents.join('\n'), 'utf-8') }));
emitter.emit('data', new File({ path: bundle + '.nls.' + language.iso639_2 + '.js', contents: new Buffer(contents.join('\n'), 'utf-8') }));
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
vscodeLanguages.forEach(language => {
languageDirs.forEach(dir => {
const language = dir.name;
let iso639_2 = iso639_3_to_2[language];
if (!iso639_2) {
log(`\tCouldn't find iso639 2 mapping for language ${language}. Using default language instead.`);
@ -292,8 +530,8 @@ function processCoreBundleFormat(fileHeader:string, json: BundledFormat, emitter
});
}
export function processNlsFiles(opts:{fileHeader:string;}): ThroughStream {
return through(function(file: File) {
export function processNlsFiles(opts: { fileHeader: string; languages: string[] }): ThroughStream {
return through(function (file: File) {
let fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
@ -303,9 +541,576 @@ export function processNlsFiles(opts:{fileHeader:string;}): ThroughStream {
this.emit('error', `Failed to read component file: ${file.relative}`);
}
if (BundledFormat.is(json)) {
processCoreBundleFormat(opts.fileHeader, json, this);
processCoreBundleFormat(opts.fileHeader, opts.languages, json, this);
}
}
this.emit('data', file);
});
}
export function prepareXlfFiles(projectName?: string, extensionName?: string): ThroughStream {
return through(
function (file: File) {
if (!file.isBuffer()) {
throw new Error(`Failed to read component file: ${file.relative}`);
}
const extension = path.extname(file.path);
if (extension === '.json') {
const json = JSON.parse((<Buffer>file.contents).toString('utf8'));
if (BundledFormat.is(json)) {
importBundleJson(file, json, this);
} else if (PackageJsonFormat.is(json) || ModuleJsonFormat.is(json)) {
importModuleOrPackageJson(file, json, projectName, this, extensionName);
} else {
throw new Error(`JSON format cannot be deduced for ${file.relative}.`);
}
} else if (extension === '.isl') {
importIsl(file, this);
}
}
);
}
const editorProject: string = 'vscode-editor',
workbenchProject: string = 'vscode-workbench',
extensionsProject: string = 'vscode-extensions',
setupProject: string = 'vscode-setup';
export function getResource(sourceFile: string): Resource {
let resource: string;
if (/^vs\/platform/.test(sourceFile)) {
return { name: 'vs/platform', project: editorProject };
} else if (/^vs\/editor\/contrib/.test(sourceFile)) {
return { name: 'vs/editor/contrib', project: editorProject };
} else if (/^vs\/editor/.test(sourceFile)) {
return { name: 'vs/editor', project: editorProject };
} else if (/^vs\/base/.test(sourceFile)) {
return { name: 'vs/base', project: editorProject };
} else if (/^vs\/code/.test(sourceFile)) {
return { name: 'vs/code', project: workbenchProject };
} else if (/^vs\/workbench\/parts/.test(sourceFile)) {
resource = sourceFile.split('/', 4).join('/');
return { name: resource, project: workbenchProject };
} else if (/^vs\/workbench\/services/.test(sourceFile)) {
resource = sourceFile.split('/', 4).join('/');
return { name: resource, project: workbenchProject };
} else if (/^vs\/workbench/.test(sourceFile)) {
return { name: 'vs/workbench', project: workbenchProject };
}
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
}
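// Illustrative only: getResource maps a source module path to its Transifex
// resource and project. A path under vs/workbench/parts or vs/workbench/services
// keeps its first four segments, so 'vs/workbench/parts/search/browser/searchWidget'
// would map to { name: 'vs/workbench/parts/search', project: 'vscode-workbench' },
// while 'vs/editor/common/model' would map to { name: 'vs/editor', project: 'vscode-editor' }.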
function importBundleJson(file: File, json: BundledFormat, stream: ThroughStream): void {
let bundleXlfs: Map<XLF> = Object.create(null);
for (let source in json.keys) {
const projectResource = getResource(source);
const resource = projectResource.name;
const project = projectResource.project;
const keys = json.keys[source];
const messages = json.messages[source];
if (keys.length !== messages.length) {
throw new Error(`There is a mismatch between keys and messages in ${file.relative}`);
}
let xlf = bundleXlfs[resource] ? bundleXlfs[resource] : bundleXlfs[resource] = new XLF(project);
xlf.addFile('src/' + source, keys, messages);
}
for (let resource in bundleXlfs) {
const newFilePath = `${bundleXlfs[resource].project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({ path: newFilePath, contents: new Buffer(bundleXlfs[resource].toString(), 'utf-8') });
stream.emit('data', xlfFile);
}
}
// Keeps existing XLF instances and a state of how many files were already processed for faster file emission
var extensions: Map<{ xlf: XLF, processed: number }> = Object.create(null);
function importModuleOrPackageJson(file: File, json: ModuleJsonFormat | PackageJsonFormat, projectName: string, stream: ThroughStream, extensionName?: string): void {
if (ModuleJsonFormat.is(json) && json['keys'].length !== json['messages'].length) {
throw new Error(`There is a mismatch between keys and messages in ${file.relative}`);
}
// Prepare the source path for <original/> attribute in XLF & extract messages from JSON
const formattedSourcePath = file.relative.replace(/\\/g, '/');
const messages = Object.keys(json).map((key) => json[key].toString());
// Stores the amount of localization files to be transformed to XLF before the emission
let localizationFilesCount,
originalFilePath;
// If preparing XLF for external extension, then use different glob pattern and source path
if (extensionName) {
localizationFilesCount = glob.sync('**/*.nls.json').length;
originalFilePath = `${formattedSourcePath.substr(0, formattedSourcePath.length - '.nls.json'.length)}`;
} else {
// Used for vscode/extensions folder
extensionName = formattedSourcePath.split('/')[0];
localizationFilesCount = glob.sync(`./extensions/${extensionName}/**/*.nls.json`).length;
originalFilePath = `extensions/${formattedSourcePath.substr(0, formattedSourcePath.length - '.nls.json'.length)}`;
}
let extension = extensions[extensionName] ?
extensions[extensionName] : extensions[extensionName] = { xlf: new XLF(projectName), processed: 0 };
// .nls.json can come with empty array of keys and messages, check for it
if (ModuleJsonFormat.is(json) && json.keys.length !== 0) {
extension.xlf.addFile(originalFilePath, json.keys, json.messages);
} else if (PackageJsonFormat.is(json) && Object.keys(json).length !== 0) {
extension.xlf.addFile(originalFilePath, Object.keys(json), messages);
}
// Check if XLF is populated with file nodes to emit it
if (++extensions[extensionName].processed === localizationFilesCount) {
const newFilePath = path.join(projectName, extensionName + '.xlf');
const xlfFile = new File({ path: newFilePath, contents: new Buffer(extension.xlf.toString(), 'utf-8') });
stream.emit('data', xlfFile);
}
}
function importIsl(file: File, stream: ThroughStream) {
let projectName: string,
resourceFile: string;
if (path.basename(file.path) === 'Default.isl') {
projectName = setupProject;
resourceFile = 'setup_default.xlf';
} else {
projectName = workbenchProject;
resourceFile = 'setup_messages.xlf';
}
let xlf = new XLF(projectName),
keys: string[] = [],
messages: string[] = [];
let model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
return;
case '[':
inMessageSection = '[Messages]' === line || '[CustomMessages]' === line;
return;
}
if (!inMessageSection) {
return;
}
let sections: string[] = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
} else {
let key = sections[0];
let value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
}
}
});
const originalPath = file.path.substring(file.cwd.length + 1, file.path.split('.')[0].length).replace(/\\/g, '/');
xlf.addFile(originalPath, keys, messages);
// Emit only upon all ISL files combined into single XLF instance
const newFilePath = path.join(projectName, resourceFile);
const xlfFile = new File({ path: newFilePath, contents: new Buffer(xlf.toString(), 'utf-8') });
stream.emit('data', xlfFile);
}
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises = [];
let updateCreatePromises = [];
return through(function (file: File) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
const credentials = `${username}:${password}`;
// Check if resource already exists, if not, then create it.
let promise = tryGetResource(project, slug, apiHostname, credentials);
tryGetPromises.push(promise);
promise.then(exists => {
if (exists) {
promise = updateResource(project, slug, file, apiHostname, credentials);
} else {
promise = createResource(project, slug, file, apiHostname, credentials);
}
updateCreatePromises.push(promise);
});
}, function () {
// End the pipe only after all the communication with Transifex API happened
Promise.all(tryGetPromises).then(() => {
Promise.all(updateCreatePromises).then(() => {
this.emit('end');
}).catch((reason) => { throw new Error(reason); });
}).catch((reason) => { throw new Error(reason); });
});
}
function tryGetResource(project: string, slug: string, apiHostname: string, credentials: string): Promise<boolean> {
return new Promise((resolve, reject) => {
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/?details`,
auth: credentials,
method: 'GET'
};
const request = https.request(options, (response) => {
if (response.statusCode === 404) {
resolve(false);
} else if (response.statusCode === 200) {
resolve(true);
} else {
reject(`Failed to query resource ${project}/${slug}. Response: ${response.statusCode} ${response.statusMessage}`);
}
});
request.on('error', (err) => {
reject(`Failed to get ${project}/${slug} on Transifex: ${err}`);
});
request.end();
});
}
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
return new Promise((resolve, reject) => {
const data = JSON.stringify({
'content': xlfFile.contents.toString(),
'name': slug,
'slug': slug,
'i18n_type': 'XLIFF'
});
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resources`,
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(data)
},
auth: credentials,
method: 'POST'
};
let request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
} else {
reject(`Something went wrong in the request creating ${slug} in ${project}. ${res.statusCode}`);
}
});
request.on('error', (err) => {
reject(`Failed to create ${project}/${slug} on Transifex: ${err}`);
});
request.write(data);
request.end();
});
}
/**
* The following link provides information about how Transifex handles updates of a resource file:
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
*/
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
return new Promise((resolve, reject) => {
const data = JSON.stringify({ content: xlfFile.contents.toString() });
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/content`,
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(data)
},
auth: credentials,
method: 'PUT'
};
let request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
let responseBuffer: string = '';
res.on('data', function (chunk) {
responseBuffer += chunk;
});
res.on('end', () => {
const response = JSON.parse(responseBuffer);
log(`Resource ${project}/${slug} successfully updated on Transifex. Strings added: ${response.strings_added}, updated: ${response.strings_added}, deleted: ${response.strings_added}`);
resolve();
});
} else {
reject(`Something went wrong in the request updating ${slug} in ${project}. ${res.statusCode}`);
}
});
request.on('error', (err) => {
reject(`Failed to update ${project}/${slug} on Transifex: ${err}`);
});
request.write(data);
request.end();
});
}
function obtainProjectResources(projectName: string): Resource[] {
let resources: Resource[] = [];
if (projectName === editorProject) {
const json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
resources = JSON.parse(json).editor;
} else if (projectName === workbenchProject) {
const json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
resources = JSON.parse(json).workbench;
} else if (projectName === extensionsProject) {
let extensionsToLocalize: string[] = glob.sync('./extensions/**/*.nls.json').map(extension => extension.split('/')[2]);
let resourcesToPull: string[] = [];
extensionsToLocalize.forEach(extension => {
if (resourcesToPull.indexOf(extension) === -1) { // remove duplicate elements returned by glob
resourcesToPull.push(extension);
resources.push({ name: extension, project: projectName });
}
});
} else if (projectName === setupProject) {
resources.push({ name: 'setup_default', project: setupProject });
}
return resources;
}
export function pullXlfFiles(projectName: string, apiHostname: string, username: string, password: string, languages: string[], resources?: Resource[]): NodeJS.ReadableStream {
if (!resources) {
resources = obtainProjectResources(projectName);
}
if (!resources) {
throw new Error('Transifex projects and resources must be defined to be able to pull translations from Transifex.');
}
const credentials = `${username}:${password}`;
let expectedTranslationsCount = languages.length * resources.length;
let translationsRetrieved = 0, called = false;
return readable(function (count, callback) {
// Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end');
}
if (!called) {
called = true;
const stream = this;
// Retrieve XLF files from main projects
languages.map(function (language) {
resources.map(function (resource) {
retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
stream.emit('data', file);
translationsRetrieved++;
}).catch(error => { throw new Error(error); });
});
});
}
callback();
});
}
function retrieveResource(language: string, resource: Resource, apiHostname, credentials): Promise<File> {
return new Promise<File>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
const iso639 = language.toLowerCase();
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${iso639}?file&mode=onlyreviewed`,
auth: credentials,
method: 'GET'
};
let request = https.request(options, (res) => {
let xlfBuffer: Buffer[] = [];
res.on('data', (chunk: Buffer) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
resolve(new File({ contents: Buffer.concat(xlfBuffer), path: `${project}/${iso639_2_to_3[language]}/${slug}.xlf` }));
} else {
reject(`${slug} in ${project} returned no data. Response code: ${res.statusCode}.`);
}
});
});
request.on('error', (err) => {
reject(`Failed to query resource ${slug} with the following error: ${err}`);
});
request.end();
});
}
export function prepareJsonFiles(): ThroughStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
return through(function (xlf: File) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(
function (resolvedFiles) {
resolvedFiles.forEach(file => {
let messages = file.messages, translatedFile;
// ISL file path always starts with 'build/'
if (/^build\//.test(file.originalFilePath)) {
const defaultLanguages = { 'zh-hans': true, 'zh-hant': true, 'ko': true };
if (path.basename(file.originalFilePath) === 'Default' && !defaultLanguages[file.language]) {
return;
}
translatedFile = createIslFile('..', file.originalFilePath, messages, iso639_2_to_3[file.language]);
} else {
translatedFile = createI18nFile(iso639_2_to_3[file.language], file.originalFilePath, messages);
}
stream.emit('data', translatedFile);
});
},
function (rejectReason) {
throw new Error(`XLF parsing error: ${rejectReason}`);
}
);
}, function () {
Promise.all(parsePromises)
.then(() => { this.emit('end'); })
.catch(reason => { throw new Error(reason); });
});
}
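// A minimal end-to-end sketch, assuming the usual gulp wiring (all names are placeholders):
// pullXlfFiles(projectName, apiHostname, username, password, languages)
//     .pipe(prepareJsonFiles())
//     .pipe(gulp.dest('./i18n'));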
function createI18nFile(base: string, originalFilePath: string, messages: Map<string>): File {
let content = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the MIT License. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/',
'// Do not edit this file. It is machine generated.'
].join('\n') + '\n' + JSON.stringify(messages, null, '\t').replace(/\r\n/g, '\n');
return new File({
path: path.join(base, originalFilePath + '.i18n.json'),
contents: new Buffer(content, 'utf8')
});
}
const languageNames: Map<string> = {
'chs': 'Simplified Chinese',
'cht': 'Traditional Chinese',
'kor': 'Korean'
};
const languageIds: Map<string> = {
'chs': '$0804',
'cht': '$0404',
'kor': '$0412'
};
const encodings: Map<string> = {
'chs': 'CP936',
'cht': 'CP950',
'jpn': 'CP932',
'kor': 'CP949',
'deu': 'CP1252',
'fra': 'CP1252',
'esn': 'CP1252',
'rus': 'CP1251',
'ita': 'CP1252',
'ptb': 'CP1252',
'hun': 'CP1250',
'trk': 'CP1254'
};
function createIslFile(base: string, originalFilePath: string, messages: Map<string>, language: string): File {
let content: string[] = [];
let originalContent: TextModel;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
} else {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.en.isl', 'utf8'));
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
content.push(`; *** Inno Setup version 5.5.3+ ${languageNames[language]} messages ***`);
} else {
content.push(line);
}
} else {
let sections: string[] = line.split('=');
let key = sections[0];
let translated = line;
if (key) {
if (key === 'LanguageName') {
translated = `${key}=${languageNames[language]}`;
} else if (key === 'LanguageID') {
translated = `${key}=${languageIds[language]}`;
} else if (key === 'LanguageCodePage') {
translated = `${key}=${encodings[language].substr(2)}`;
} else {
let translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
}
}
content.push(translated);
}
}
});
const tag = iso639_3_to_2[language];
const basename = path.basename(originalFilePath);
const filePath = `${path.join(base, path.dirname(originalFilePath), basename)}.${tag}.isl`;
return new File({
path: filePath,
contents: iconv.encode(new Buffer(content.join('\r\n'), 'utf8'), encodings[language])
});
}
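// Worked example for language 'chs' (illustrative): section headers and ';' comment lines are copied,
// LanguageName becomes 'Simplified Chinese', LanguageID becomes '$0804', LanguageCodePage becomes '936'
// (the 'CP' prefix of 'CP936' is stripped), and every other key takes the translated message from
// `messages` when one exists; the result is re-encoded with CP936 and written as `<name>.<tag>.isl`,
// where the tag comes from iso639_3_to_2 (e.g. zh-hans).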
function encodeEntities(value: string): string {
var result: string[] = [];
for (var i = 0; i < value.length; i++) {
var ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');
break;
case '>':
result.push('&gt;');
break;
case '&':
result.push('&amp;');
break;
default:
result.push(ch);
}
}
return result.join('');
}
function decodeEntities(value: string): string {
return value.replace(/&lt;/g, '<').replace(/&gt;/g, '>').replace(/&amp;/g, '&');
}
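// Worked example: encodeEntities('a < b && c > d') returns 'a &lt; b &amp;&amp; c &gt; d', and
// decodeEntities reverses it; only '<', '>' and '&' are handled, quotes pass through unchanged.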

View file

@ -1,355 +1,359 @@
"use strict";
var ts = require("./typescript/typescriptServices");
var lazy = require("lazy.js");
var event_stream_1 = require("event-stream");
var File = require("vinyl");
var sm = require("source-map");
var assign = require("object-assign");
var path = require("path");
var CollectStepResult;
(function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
CollectStepResult[CollectStepResult["YesAndRecurse"] = 1] = "YesAndRecurse";
CollectStepResult[CollectStepResult["No"] = 2] = "No";
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) {
var result = [];
function loop(node) {
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
}
if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) {
ts.forEachChild(node, loop);
}
}
loop(node);
return result;
}
function clone(object) {
var result = {};
for (var id in object) {
result[id] = object[id];
}
return result;
}
function template(lines) {
var indent = '', wrap = '';
if (lines.length > 1) {
indent = '\t';
wrap = '\n';
}
return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
}
/**
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
var input = event_stream_1.through();
var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) {
return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
}
var source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
}
var root = f.sourceMap.sourceRoot;
if (root) {
source = path.join(root, source);
}
var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
}
nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
}));
return event_stream_1.duplex(input, output);
}
function isImportNode(node) {
return node.kind === 212 /* ImportDeclaration */ || node.kind === 211 /* ImportEqualsDeclaration */;
}
(function (nls_1) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
});
}
nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) {
return { source: source, line: lc.line + 1, column: lc.character };
}
nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) {
return { line: position.line - 1, character: position.column };
}
nls_1.lcFrom = lcFrom;
var SingleFileServiceHost = (function () {
function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options;
this.filename = filename;
this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString('');
}
return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
return CollectStepResult.No;
}
return node.kind === 160 /* CallExpression */ ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
}
function analyze(contents, options) {
if (options === void 0) { options = {}; }
var filename = 'file.ts';
var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
var service = ts.createLanguageService(serviceHost);
var sourceFile = service.getSourceFile(filename);
// all imports
var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls');
var importEqualsDeclarations = imports
.filter(function (n) { return n.kind === 211 /* ImportEqualsDeclaration */; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleReference.kind === 222 /* ExternalModuleReference */; })
.filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls';
var importDeclarations = imports
.filter(function (n) { return n.kind === 212 /* ImportDeclaration */; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleSpecifier.kind === 8 /* StringLiteral */; })
.filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
var nlsExpressions = importEqualsDeclarations
.map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
}); });
// `nls.localize(...)` calls
var nlsLocalizeCallExpressions = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === 214 /* NamespaceImport */; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; })
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
.filter(function (n) { return n.expression.kind === 158 /* PropertyAccessExpression */ && n.expression.name.getText() === 'localize'; });
// `localize` named imports
var allLocalizeImportDeclarations = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === 215 /* NamedImports */; })
.map(function (d) { return d.importClause.namedBindings.elements; })
.flatten();
// `localize` read-only references
var localizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.name.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references
var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references
var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences)
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; });
// collect everything
var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions)
.map(function (e) { return e.arguments; })
.filter(function (a) { return a.length > 1; })
.sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText()
}); });
return {
localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray()
};
}
nls_1.analyze = analyze;
var TextModel = (function () {
function TextModel(contents) {
var regex = /\r\n|\r|\n/g;
var index = 0;
var match;
this.lines = [];
this.lineEndings = [];
while (match = regex.exec(contents)) {
this.lines.push(contents.substring(index, match.index));
this.lineEndings.push(match[0]);
index = regex.lastIndex;
}
if (contents.length > 0) {
this.lines.push(contents.substring(index, contents.length));
this.lineEndings.push('');
}
}
TextModel.prototype.get = function (index) {
return this.lines[index];
};
TextModel.prototype.set = function (index, line) {
this.lines[index] = line;
};
Object.defineProperty(TextModel.prototype, "lineCount", {
get: function () {
return this.lines.length;
},
enumerable: true,
configurable: true
});
/**
* Applies patch(es) to the model.
* Multiple patches must be ordered.
* Does not support patches spanning multiple lines.
*/
TextModel.prototype.apply = function (patch) {
var startLineNumber = patch.span.start.line;
var endLineNumber = patch.span.end.line;
var startLine = this.lines[startLineNumber] || '';
var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character),
patch.content,
endLine.substring(patch.span.end.character)
].join('');
for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = '';
}
};
TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join('');
};
return TextModel;
}());
nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) {
var model = new nls.TextModel(contents);
// patch the localize calls
lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports
var firstLine = model.get(0);
var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine);
return model.toString();
}
nls_1.patchJavascript = patchJavascript;
function patchSourcemap(patches, rsm, smc) {
var smg = new sm.SourceMapGenerator({
file: rsm.file,
sourceRoot: rsm.sourceRoot
});
patches = patches.reverse();
var currentLine = -1;
var currentLineDiff = 0;
var source = null;
smc.eachMapping(function (m) {
var patch = patches[patches.length - 1];
var original = { line: m.originalLine, column: m.originalColumn };
var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) {
currentLineDiff = 0;
}
currentLine = generated.line;
generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
var originalLength = patch.span.end.character - patch.span.start.character;
var modifiedLength = patch.content.length;
var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff;
generated.column += lengthDiff;
patches.pop();
}
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source));
}
return JSON.parse(smg.toString());
}
nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) {
var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) {
return { javascript: javascript, sourcemap: sourcemap };
}
var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
var smc = new sm.SourceMapConsumer(sourcemap);
var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
var i = 0;
// build patches
var patches = lazy(localizeCalls)
.map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' }
]); })
.flatten()
.map(function (c) {
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start: start, end: end }, content: c.content };
})
.toArray();
javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information,
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
});
}
sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
}
nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) {
// hack?
var moduleId = javascriptFile.relative
.replace(/\.js$/, '')
.replace(/\\/g, '/');
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap;
if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
}
if (nls) {
result.push(fileFrom(javascriptFile, nls, javascriptFile.path.replace(/\.js$/, '.nls.js')));
}
return result;
}
nls_1.patchFiles = patchFiles;
})(nls || (nls = {}));
module.exports = nls;
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var ts = require("typescript");
var lazy = require("lazy.js");
var event_stream_1 = require("event-stream");
var File = require("vinyl");
var sm = require("source-map");
var assign = require("object-assign");
var path = require("path");
var CollectStepResult;
(function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
CollectStepResult[CollectStepResult["YesAndRecurse"] = 1] = "YesAndRecurse";
CollectStepResult[CollectStepResult["No"] = 2] = "No";
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) {
var result = [];
function loop(node) {
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
}
if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) {
ts.forEachChild(node, loop);
}
}
loop(node);
return result;
}
function clone(object) {
var result = {};
for (var id in object) {
result[id] = object[id];
}
return result;
}
function template(lines) {
var indent = '', wrap = '';
if (lines.length > 1) {
indent = '\t';
wrap = '\n';
}
return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
}
/**
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
var input = event_stream_1.through();
var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) {
return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
}
var source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
}
var root = f.sourceMap.sourceRoot;
if (root) {
source = path.join(root, source);
}
var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
}
nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
}));
return event_stream_1.duplex(input, output);
}
function isImportNode(node) {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
}
(function (nls_1) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
});
}
nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) {
return { source: source, line: lc.line + 1, column: lc.character };
}
nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) {
return { line: position.line - 1, character: position.column };
}
nls_1.lcFrom = lcFrom;
var SingleFileServiceHost = (function () {
function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options;
this.filename = filename;
this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString('');
}
return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
return CollectStepResult.No;
}
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
}
function analyze(contents, options) {
if (options === void 0) { options = {}; }
var filename = 'file.ts';
var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
var service = ts.createLanguageService(serviceHost);
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports
var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls');
var importEqualsDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
.filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls';
var importDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
.filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
var nlsExpressions = importEqualsDeclarations
.map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
}); });
// `nls.localize(...)` calls
var nlsLocalizeCallExpressions = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; })
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports
var allLocalizeImportDeclarations = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
.map(function (d) { return d.importClause.namedBindings.elements; })
.flatten();
// `localize` read-only references
var localizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.name.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references
var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references
var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences)
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; });
// collect everything
var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions)
.map(function (e) { return e.arguments; })
.filter(function (a) { return a.length > 1; })
.sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText()
}); });
return {
localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray()
};
}
nls_1.analyze = analyze;
var TextModel = (function () {
function TextModel(contents) {
var regex = /\r\n|\r|\n/g;
var index = 0;
var match;
this.lines = [];
this.lineEndings = [];
while (match = regex.exec(contents)) {
this.lines.push(contents.substring(index, match.index));
this.lineEndings.push(match[0]);
index = regex.lastIndex;
}
if (contents.length > 0) {
this.lines.push(contents.substring(index, contents.length));
this.lineEndings.push('');
}
}
TextModel.prototype.get = function (index) {
return this.lines[index];
};
TextModel.prototype.set = function (index, line) {
this.lines[index] = line;
};
Object.defineProperty(TextModel.prototype, "lineCount", {
get: function () {
return this.lines.length;
},
enumerable: true,
configurable: true
});
/**
* Applies patch(es) to the model.
* Multiple patches must be ordered.
* Does not support patches spanning multiple lines.
*/
TextModel.prototype.apply = function (patch) {
var startLineNumber = patch.span.start.line;
var endLineNumber = patch.span.end.line;
var startLine = this.lines[startLineNumber] || '';
var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character),
patch.content,
endLine.substring(patch.span.end.character)
].join('');
for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = '';
}
};
TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join('');
};
return TextModel;
}());
nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) {
var model = new nls.TextModel(contents);
// patch the localize calls
lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports
var firstLine = model.get(0);
var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine);
return model.toString();
}
nls_1.patchJavascript = patchJavascript;
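// Worked example (illustrative): for a compiled line `nls.localize('key', "Hello")`, patch() below
// produces two patches, one replacing the key span with '0' and one replacing the value span with
// 'null'; applying them through patchJavascript yields `nls.localize(0, null)`, and the first line's
// 'vs/nls' import is rewritten to 'vs/nls!<moduleId>'.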
function patchSourcemap(patches, rsm, smc) {
var smg = new sm.SourceMapGenerator({
file: rsm.file,
sourceRoot: rsm.sourceRoot
});
patches = patches.reverse();
var currentLine = -1;
var currentLineDiff = 0;
var source = null;
smc.eachMapping(function (m) {
var patch = patches[patches.length - 1];
var original = { line: m.originalLine, column: m.originalColumn };
var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) {
currentLineDiff = 0;
}
currentLine = generated.line;
generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
var originalLength = patch.span.end.character - patch.span.start.character;
var modifiedLength = patch.content.length;
var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff;
generated.column += lengthDiff;
patches.pop();
}
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source));
}
return JSON.parse(smg.toString());
}
nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) {
var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) {
return { javascript: javascript, sourcemap: sourcemap };
}
var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
var smc = new sm.SourceMapConsumer(sourcemap);
var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
var i = 0;
// build patches
var patches = lazy(localizeCalls)
.map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' }
]); })
.flatten()
.map(function (c) {
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start: start, end: end }, content: c.content };
})
.toArray();
javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information,
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
});
}
sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
}
nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) {
// hack?
var moduleId = javascriptFile.relative
.replace(/\.js$/, '')
.replace(/\\/g, '/');
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap;
if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
}
if (nls) {
result.push(fileFrom(javascriptFile, nls, javascriptFile.path.replace(/\.js$/, '.nls.js')));
}
return result;
}
nls_1.patchFiles = patchFiles;
})(nls || (nls = {}));
module.exports = nls;
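// A hedged usage sketch (illustrative, not part of this file): nls() expects incoming vinyl files to
// carry gulp-sourcemaps metadata, so a typical pipeline looks roughly like
// gulp.src('out-build/**/*.js', { base: 'out-build' })
//     .pipe(sourcemaps.init({ loadMaps: true }))
//     .pipe(nls())
//     .pipe(gulp.dest('out-build'));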

View file

@ -1,7 +1,11 @@
import * as ts from './typescript/typescriptServices';
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream';
import { Stream } from 'stream';
import File = require('vinyl');
import * as sm from 'source-map';
import assign = require('object-assign');
@ -37,7 +41,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
return result;
}
function clone<T>(object:T): T {
function clone<T>(object: T): T {
var result = <T>{};
for (var id in object) {
result[id] = object[id];
@ -56,7 +60,7 @@ function template(lines: string[]): string {
return `/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap }]);`;
define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
}
/**
@ -66,12 +70,12 @@ function nls(): NodeJS.ReadWriteStream {
var input = through();
var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${ f.relative } does not have sourcemaps.`));
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
let source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error(`File ${ f.relative } does not have a source in the source map.`));
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
}
const root = f.sourceMap.sourceRoot;
@ -81,7 +85,7 @@ function nls(): NodeJS.ReadWriteStream {
const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error(`File ${ f.relative } does not have the original content in the source map.`));
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
}
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
@ -152,12 +156,12 @@ module nls {
this.lib = ts.ScriptSnapshot.fromString('');
}
getCompilationSettings = () => this.options;
getScriptFileNames = () => [this.filename];
getScriptVersion = () => '1';
getScriptSnapshot = (name: string) => name === this.filename ? this.file : this.lib;
getCurrentDirectory = () => '';
getDefaultLibFileName = () => 'lib.d.ts';
getCompilationSettings = () => this.options;
getScriptFileNames = () => [this.filename];
getScriptVersion = () => '1';
getScriptSnapshot = (name: string) => name === this.filename ? this.file : this.lib;
getCurrentDirectory = () => '';
getDefaultLibFileName = () => 'lib.d.ts';
}
function isCallExpressionWithinTextSpanCollectStep(textSpan: ts.TextSpan, node: ts.Node): CollectStepResult {
@ -172,7 +176,7 @@ module nls {
const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost);
const sourceFile = service.getSourceFile(filename);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
@ -180,14 +184,14 @@ module nls {
// import nls = require('vs/nls');
const importEqualsDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
.map(n => <ts.ImportEqualsDeclaration> n)
.map(n => <ts.ImportEqualsDeclaration>n)
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
.filter(d => (<ts.ExternalModuleReference>d.moduleReference).expression.getText() === '\'vs/nls\'');
// import ... from 'vs/nls';
const importDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
.map(n => <ts.ImportDeclaration> n)
.map(n => <ts.ImportDeclaration>n)
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
.filter(d => !!d.importClause && !!d.importClause.namedBindings);
@ -203,7 +207,7 @@ module nls {
// `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport> d.importClause.namedBindings).name)
.map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name))
// find read-only references to `nls`
@ -215,15 +219,15 @@ module nls {
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => <ts.CallExpression> n)
.map(n => <ts.CallExpression>n)
// only `localize` calls
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && (<ts.PropertyAccessExpression> n.expression).name.getText() === 'localize');
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && (<ts.PropertyAccessExpression>n.expression).name.getText() === 'localize');
// `localize` named imports
const allLocalizeImportDeclarations = importDeclarations
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => (<ts.NamedImports> d.importClause.namedBindings).elements)
.map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
.flatten();
// `localize` read-only references
@ -246,7 +250,7 @@ module nls {
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => <ts.CallExpression> n);
.map(n => <ts.CallExpression>n);
// collect everything
const localizeCalls = nlsLocalizeCallExpressions
@ -341,7 +345,7 @@ module nls {
// patch the 'vs/nls' imports
const firstLine = model.get(0);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${ moduleId }$1`);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
model.set(0, patchedFirstLine);
return model.toString();
@ -382,7 +386,7 @@ module nls {
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source, name: m.name, original, generated});
smg.addMapping({ source, name: m.name, original, generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
@ -425,7 +429,7 @@ module nls {
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, line => {
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${ moduleId }$1`);
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
});
}

View file

@ -1,229 +1,239 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var path = require("path");
var gulp = require("gulp");
var sourcemaps = require("gulp-sourcemaps");
var filter = require("gulp-filter");
var minifyCSS = require("gulp-cssnano");
var uglify = require("gulp-uglify");
var es = require("event-stream");
var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var i18n = require("./i18n");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
var REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
function loaderConfig(emptyPaths) {
var result = {
paths: {
'vs': 'out-build/vs',
'vscode': 'empty:'
},
nodeModules: emptyPaths || []
};
result['vs/css'] = { inlineResources: true };
return result;
}
exports.loaderConfig = loaderConfig;
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(bundledFileHeader, bundleLoader) {
var sources = [
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
var isFirst = true;
return (gulp
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
}
else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
})));
}
function toConcatStream(bundledFileHeader, sources, dest) {
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If any of the sources in a bundle contains our copyright header, insert a fake source
// carrying that header at the beginning of the bundle so it appears only once
var containsOurCopyright = false;
for (var i = 0, len = sources.length; i < len; i++) {
var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
}
}
if (containsOurCopyright) {
sources.unshift({
path: null,
contents: bundledFileHeader
});
}
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest));
}
function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
function optimizeTask(opts) {
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
var loaderConfig = opts.loaderConfig;
var bundledFileHeader = opts.header;
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
var out = opts.out;
return function () {
var bundlesStream = es.through(); // this stream will contain the bundled files
var resourcesStream = es.through(); // this stream will contain the resources
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
}
else {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader
}))
.pipe(gulp.dest(out));
};
}
exports.optimizeTask = optimizeTask;
;
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights() {
var preserveComments = function (f) {
return function (node, comment) {
var text = comment.value;
var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
}
f.__hasOurCopyright = true;
return true;
}
if ('comment2' === type) {
// check for /*!. Note that text doesn't contain leading /*
return (text.length > 0 && text[0] === '!') || /@preserve|license|@cc_on|copyright/i.test(text);
}
else if ('comment1' === type) {
return /license|copyright/i.test(text);
}
return false;
};
};
var input = es.through();
var output = input
.pipe(flatmap(function (stream, f) {
return stream
.pipe(uglify({ preserveComments: preserveComments(f) }));
}));
return es.duplex(input, output);
}
function minifyTask(src, sourceMapBaseUrl) {
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return function (cb) {
var jsFilter = filter('**/*.js', { restore: true });
var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL: sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) {
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
}
cb(err);
});
};
}
exports.minifyTask = minifyTask;
;
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var gulp = require("gulp");
var sourcemaps = require("gulp-sourcemaps");
var filter = require("gulp-filter");
var minifyCSS = require("gulp-cssnano");
var uglify = require("gulp-uglify");
var composer = require("gulp-uglify/composer");
var uglifyes = require("uglify-es");
var es = require("event-stream");
var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var i18n = require("./i18n");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
var REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
function loaderConfig(emptyPaths) {
var result = {
paths: {
'vs': 'out-build/vs',
'vscode': 'empty:'
},
nodeModules: emptyPaths || []
};
result['vs/css'] = { inlineResources: true };
return result;
}
exports.loaderConfig = loaderConfig;
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(bundledFileHeader, bundleLoader) {
var sources = [
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
var isFirst = true;
return (gulp
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
}
else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
})));
}
function toConcatStream(bundledFileHeader, sources, dest) {
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If any of the sources in a bundle contains our copyright header, insert a fake source
// carrying that header at the beginning of the bundle so it appears only once
var containsOurCopyright = false;
for (var i = 0, len = sources.length; i < len; i++) {
var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
}
}
if (containsOurCopyright) {
sources.unshift({
path: null,
contents: bundledFileHeader
});
}
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest));
}
function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
function optimizeTask(opts) {
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
var loaderConfig = opts.loaderConfig;
var bundledFileHeader = opts.header;
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
var out = opts.out;
return function () {
var bundlesStream = es.through(); // this stream will contain the bundled files
var resourcesStream = es.through(); // this stream will contain the resources
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
}
else {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
}
exports.optimizeTask = optimizeTask;
;
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights() {
var preserveComments = function (f) {
return function (node, comment) {
var text = comment.value;
var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
}
f.__hasOurCopyright = true;
return true;
}
if ('comment2' === type) {
// check for /*!. Note that text doesn't contain leading /*
return (text.length > 0 && text[0] === '!') || /@preserve|license|@cc_on|copyright/i.test(text);
}
else if ('comment1' === type) {
return /license|copyright/i.test(text);
}
return false;
};
};
var minify = composer(uglifyes);
var input = es.through();
var output = input
.pipe(flatmap(function (stream, f) {
return stream.pipe(minify({
output: {
comments: preserveComments(f),
// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
return es.duplex(input, output);
}
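// A hedged usage sketch: the duplex stream drops straight into a gulp pipeline, e.g.
// gulp.src('out/**/*.js').pipe(uglifyWithCopyrights()).pipe(gulp.dest('out-min'));
// the paths are placeholders; the real wiring is the pump() call in minifyTask below.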
function minifyTask(src, sourceMapBaseUrl) {
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return function (cb) {
var jsFilter = filter('**/*.js', { restore: true });
var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL: sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) {
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
}
cb(err);
});
};
}
exports.minifyTask = minifyTask;
;

View file

@ -11,6 +11,8 @@ import * as sourcemaps from 'gulp-sourcemaps';
import * as filter from 'gulp-filter';
import * as minifyCSS from 'gulp-cssnano';
import * as uglify from 'gulp-uglify';
import * as composer from 'gulp-uglify/composer';
import * as uglifyes from 'uglify-es';
import * as es from 'event-stream';
import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl';
@ -24,17 +26,17 @@ import * as sm from 'source-map';
const REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix:string, message:string): void {
function log(prefix: string, message: string): void {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
export function loaderConfig(emptyPaths:string[]) {
export function loaderConfig(emptyPaths: string[]) {
const result = {
paths: {
'vs': 'out-build/vs',
'vscode': 'empty:'
},
nodeModules: emptyPaths||[]
nodeModules: emptyPaths || []
};
result['vs/css'] = { inlineResources: true };
@ -48,7 +50,7 @@ declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap;
}
function loader(bundledFileHeader:string, bundleLoader:boolean): NodeJS.ReadWriteStream {
function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
let sources = [
'out-build/vs/loader.js'
];
@ -62,30 +64,30 @@ function loader(bundledFileHeader:string, bundleLoader:boolean): NodeJS.ReadWrit
let isFirst = true;
return (
gulp
.src(sources, { base: 'out-build' })
.pipe(es.through(function(data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
} else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync<FileSourceMap,FileSourceMap>(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
}))
.src(sources, { base: 'out-build' })
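			// A synthetic 'fake' file carrying the copyright header is emitted first so it ends up
			// at the top of the concatenated vs/loader.js.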
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
} else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync<FileSourceMap, FileSourceMap>(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
}))
);
}
function toConcatStream(bundledFileHeader:string, sources:bundle.IFile[], dest:string): NodeJS.ReadWriteStream {
function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
@ -106,7 +108,7 @@ function toConcatStream(bundledFileHeader:string, sources:bundle.IFile[], dest:s
});
}
const treatedSources = sources.map(function(source) {
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + '/out-build' : '';
@ -122,8 +124,8 @@ function toConcatStream(bundledFileHeader:string, sources:bundle.IFile[], dest:s
.pipe(concat(dest));
}
function toBundleStream(bundledFileHeader:string, bundles:bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function(bundle) {
function toBundleStream(bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
@ -158,8 +160,12 @@ export interface IOptimizeTaskOpts {
* (out folder name)
*/
out: string;
/**
* (languages to process)
*/
languages: string[];
}
export function optimizeTask(opts:IOptimizeTaskOpts):()=>NodeJS.ReadWriteStream {
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources;
const resources = opts.resources;
@ -168,19 +174,19 @@ export function optimizeTask(opts:IOptimizeTaskOpts):()=>NodeJS.ReadWriteStream
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
return function() {
return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function(err, result) {
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
result.cssInlinedResources.forEach(function(resource) {
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
@ -188,7 +194,7 @@ export function optimizeTask(opts:IOptimizeTaskOpts):()=>NodeJS.ReadWriteStream
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
const bundleInfoArray:VinylFile[] = [];
const bundleInfoArray: VinylFile[] = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
@ -200,7 +206,7 @@ export function optimizeTask(opts:IOptimizeTaskOpts):()=>NodeJS.ReadWriteStream
});
const otherSourcesStream = es.through();
const otherSourcesStreamArr:NodeJS.ReadWriteStream[] = [];
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
@ -228,7 +234,8 @@ export function optimizeTask(opts:IOptimizeTaskOpts):()=>NodeJS.ReadWriteStream
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
@ -241,9 +248,9 @@ declare class FileWithCopyright extends VinylFile {
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights():NodeJS.ReadWriteStream {
const preserveComments = (f:FileWithCopyright) => {
return (node, comment:{value:string;type:string;}) => {
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
const preserveComments = (f: FileWithCopyright) => {
return (node, comment: { value: string; type: string; }) => {
const text = comment.value;
const type = comment.type;
@ -271,18 +278,24 @@ function uglifyWithCopyrights():NodeJS.ReadWriteStream {
};
};
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
return stream
.pipe(uglify({ preserveComments: preserveComments(<FileWithCopyright>f) }));
return stream.pipe(minify({
output: {
comments: preserveComments(<FileWithCopyright>f),
					// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
return es.duplex(input, output);
}
export function minifyTask(src:string, sourceMapBaseUrl:string):(cb:any)=>void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${ sourceMapBaseUrl }/${ f.relative }.map`);
export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => {
const jsFilter = filter('**/*.js', { restore: true });
@ -304,12 +317,12 @@ export function minifyTask(src:string, sourceMapBaseUrl:string):(cb:any)=>void {
addComment: true
}),
gulp.dest(src + '-min')
, (err:any) => {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${ err.cause && err.cause.filename }'`);
}
, (err: any) => {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
}
cb(err);
});
cb(err);
});
};
};

View file

@ -1,59 +1,83 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var es = require("event-stream");
var _ = require("underscore");
var util = require("gulp-util");
var allErrors = [];
var startTime = null;
var count = 0;
function onStart() {
if (count++ > 0) {
return;
}
startTime = new Date().getTime();
util.log("Starting " + util.colors.green('compilation') + "...");
}
function onEnd() {
if (--count > 0) {
return;
}
log();
}
function log() {
var errors = _.flatten(allErrors);
errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
}
function createReporter() {
var errors = [];
allErrors.push(errors);
var ReportFunc = (function () {
function ReportFunc(err) {
errors.push(err);
}
ReportFunc.hasErrors = function () {
return errors.length > 0;
};
ReportFunc.end = function (emitError) {
errors.length = 0;
onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
log();
this.emit('error');
}
else {
this.emit('end');
}
});
};
return ReportFunc;
}());
return ReportFunc;
}
exports.createReporter = createReporter;
;
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var _ = require("underscore");
var util = require("gulp-util");
var fs = require("fs");
var path = require("path");
var allErrors = [];
var startTime = null;
var count = 0;
function onStart() {
if (count++ > 0) {
return;
}
startTime = new Date().getTime();
util.log("Starting " + util.colors.green('compilation') + "...");
}
function onEnd() {
if (--count > 0) {
return;
}
log();
}
var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try {
fs.mkdirSync(path.dirname(buildLogPath));
}
catch (err) {
// ignore
}
function log() {
var errors = _.flatten(allErrors);
errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors
.map(function (err) { return regex.exec(err); })
.filter(function (match) { return !!match; })
.map(function (_a) {
var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
});
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
}
catch (err) {
//noop
}
util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
}
function createReporter() {
var errors = [];
allErrors.push(errors);
var ReportFunc = (function () {
function ReportFunc(err) {
errors.push(err);
}
ReportFunc.hasErrors = function () {
return errors.length > 0;
};
ReportFunc.end = function (emitError) {
errors.length = 0;
onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
log();
this.emit('error');
}
else {
this.emit('end');
}
});
};
return ReportFunc;
}());
return ReportFunc;
}
exports.createReporter = createReporter;
;

View file

@ -8,6 +8,8 @@
import * as es from 'event-stream';
import * as _ from 'underscore';
import * as util from 'gulp-util';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: Error[][] = [];
let startTime: number = null;
@ -30,10 +32,31 @@ function onEnd(): void {
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try {
fs.mkdirSync(path.dirname(buildLogPath));
} catch (err) {
// ignore
}
function log(): void {
const errors = _.flatten(allErrors);
errors.map(err => util.log(`${util.colors.red('Error')}: ${err}`));
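	// Parse tsc-style diagnostics ("path/to/file.ts(line,col): message") so they can be
	// persisted as JSON to .build/log below.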
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} catch (err) {
//noop
}
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
}

View file

@ -0,0 +1,39 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var snaps;
(function (snaps) {
var fs = require('fs');
var path = require('path');
var os = require('os');
var cp = require('child_process');
var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
var product = require('../../product.json');
var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
//
var loaderFilepath;
var startupBlobFilepath;
switch (process.platform) {
case 'darwin':
loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
break;
case 'win32':
case 'linux':
loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
}
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
var inputFile = fs.readFileSync(loaderFilepath);
var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
}
})(snaps || (snaps = {}));

View file

@ -0,0 +1,63 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
namespace snaps {
const fs = require('fs');
const path = require('path');
const os = require('os');
const cp = require('child_process');
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`);
const product = require('../../product.json');
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
//
let loaderFilepath: string;
let startupBlobFilepath: string;
switch (process.platform) {
case 'darwin':
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`;
break;
case 'win32':
case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
}
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath);
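	// snapshotLoader wraps loader.js in a Monaco_Loader_Init shim, writes it to a temp file
	// and runs mksnapshot on it so the AMD loader gets baked into Electron's snapshot_blob.bin.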
function snapshotLoader(loaderFilepath: string, startupBlobFilepath: string): void {
const inputFile = fs.readFileSync(loaderFilepath);
const wrappedInputFile = `
var Monaco_Loader_Init;
(function() {
var doNotInitLoader = true;
${inputFile.toString()};
Monaco_Loader_Init = function() {
AMDLoader.init();
CSSLoaderPlugin.init();
NLSLoaderPlugin.init();
return { define, require };
}
})();
`;
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]);
}
}

View file

@ -0,0 +1,40 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var assert = require("assert");
var i18n = require("../i18n");
suite('XLF Parser Tests', function () {
var sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
var sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
var originalFilePath = 'vs/base/common/keybinding';
var keys = ['key1', 'key2'];
var messages = ['Key #1', 'Key #2 &'];
var translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', function () {
var xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
var xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
});
test('XLF to keys & messages conversion', function () {
i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
});
});
test('JSON file source path to Transifex resource match', function () {
var editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
var platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/parts/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});

View file

@ -0,0 +1,54 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import assert = require('assert');
import i18n = require('../i18n');
suite('XLF Parser Tests', () => {
const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
const originalFilePath = 'vs/base/common/keybinding';
const keys = ['key1', 'key2'];
const messages = ['Key #1', 'Key #2 &'];
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', () => {
let xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
});
test('XLF to keys & messages conversion', () => {
i18n.XLF.parse(sampleTranslatedXlf).then(function(resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
});
});
test('JSON file source path to Transifex resource match', () => {
const editorProject: string = 'vscode-editor',
workbenchProject: string = 'vscode-workbench';
const platform: i18n.Resource = { name: 'vs/platform', project: editorProject },
editorContrib = { name: 'vs/editor/contrib', project: editorProject },
editor = { name: 'vs/editor', project: editorProject },
base = { name: 'vs/base', project: editorProject },
code = { name: 'vs/code', project: workbenchProject },
workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject },
workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject },
workbench = { name: 'vs/workbench', project: workbenchProject};
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/parts/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});

View file

@ -0,0 +1,59 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var allowed = this.getOptions().ruleArguments[0];
return this.applyWithWalker(new AsyncRuleWalker(sourceFile, this.getOptions(), allowed));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var AsyncRuleWalker = (function (_super) {
__extends(AsyncRuleWalker, _super);
function AsyncRuleWalker(file, opts, allowed) {
var _this = _super.call(this, file, opts) || this;
_this.allowed = allowed;
return _this;
}
AsyncRuleWalker.prototype.visitMethodDeclaration = function (node) {
this.visitFunctionLikeDeclaration(node);
};
AsyncRuleWalker.prototype.visitFunctionDeclaration = function (node) {
this.visitFunctionLikeDeclaration(node);
};
AsyncRuleWalker.prototype.visitFunctionLikeDeclaration = function (node) {
var _this = this;
var flags = ts.getCombinedModifierFlags(node);
if (!(flags & ts.ModifierFlags.Async)) {
return;
}
var path = node.getSourceFile().path;
var pathParts = path.split(/\\|\//);
if (pathParts.some(function (part) { return _this.allowed.some(function (allowed) { return part === allowed; }); })) {
return;
}
var message = "You are not allowed to use async function in this layer. Allowed layers are: [" + this.allowed + "]";
this.addFailureAtNode(node, message);
};
return AsyncRuleWalker;
}(Lint.RuleWalker));

View file

@ -0,0 +1,47 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const allowed = this.getOptions().ruleArguments[0] as string[];
return this.applyWithWalker(new AsyncRuleWalker(sourceFile, this.getOptions(), allowed));
}
}
class AsyncRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions, private allowed: string[]) {
super(file, opts);
}
protected visitMethodDeclaration(node: ts.MethodDeclaration): void {
this.visitFunctionLikeDeclaration(node);
}
protected visitFunctionDeclaration(node: ts.FunctionDeclaration): void {
this.visitFunctionLikeDeclaration(node);
}
private visitFunctionLikeDeclaration(node: ts.FunctionLikeDeclaration) {
const flags = ts.getCombinedModifierFlags(node);
if (!(flags & ts.ModifierFlags.Async)) {
return;
}
const path = (node.getSourceFile() as any).path;
const pathParts = path.split(/\\|\//);
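		// An async function is allowed if any segment of its file path matches one of the
		// configured layer names.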
if (pathParts.some(part => this.allowed.some(allowed => part === allowed))) {
return;
}
const message = `You are not allowed to use async function in this layer. Allowed layers are: [${this.allowed}]`;
this.addFailureAtNode(node, message);
}
}

View file

@ -1,44 +1,50 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var path_1 = require("path");
var Lint = require("tslint");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
}
ImportPatterns.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
}
this.imports[path] = true;
};
return ImportPatterns;
}(Lint.RuleWalker));
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var path_1 = require("path");
var Lint = require("tslint");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
}
ImportPatterns.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
}
this.imports[path] = true;
};
return ImportPatterns;
}(Lint.RuleWalker));

View file

@ -1,51 +1,90 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Lint = require("tslint");
var minimatch = require("minimatch");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
}
}
return [];
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts, _config) {
var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
}
ImportPatterns.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
// ignore relative paths
if (path[0] === '.') {
return;
}
if (!minimatch(path, this._config.restrictions)) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + this._config.restrictions + "'-restriction."));
}
};
return ImportPatterns;
}(Lint.RuleWalker));
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var minimatch = require("minimatch");
var path_1 = require("path");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
}
}
return [];
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts, _config) {
var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
}
ImportPatterns.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
};
ImportPatterns.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
};
ImportPatterns.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
};
ImportPatterns.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path);
}
var restrictions;
if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions];
}
else {
restrictions = this._config.restrictions;
}
var matched = false;
for (var _i = 0, restrictions_1 = restrictions; _i < restrictions_1.length; _i++) {
var pattern = restrictions_1[_i];
if (minimatch(path, pattern)) {
matched = true;
break;
}
}
if (!matched) {
// None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + restrictions.join(' or ') + "' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization"));
}
};
return ImportPatterns;
}(Lint.RuleWalker));

View file

@ -6,13 +6,13 @@
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { join } from 'path';
interface ImportPatternsConfig {
target: string;
restrictions: string;
restrictions: string | string[];
}
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
@ -35,19 +35,53 @@ class ImportPatterns extends Lint.RuleWalker {
super(file, opts);
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
let path = node.moduleSpecifier.getText();
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);
// ignore relative paths
// resolve relative paths
if (path[0] === '.') {
return;
path = join(this.getSourceFile().fileName, path);
}
if (!minimatch(path, this._config.restrictions)) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Imports violates '${this._config.restrictions}'-restriction.`));
let restrictions: string[];
if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions];
} else {
restrictions = this._config.restrictions;
}
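		// The import path must match at least one of the restriction globs; otherwise it is flagged below.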
let matched = false;
for (const pattern of restrictions) {
if (minimatch(path, pattern)) {
matched = true;
break;
}
}
if (!matched) {
// None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Imports violates '${restrictions.join(' or ')}' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}

View file

@ -1,79 +1,101 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Lint = require("tslint");
var path_1 = require("path");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
}
}
if (!config) {
return [];
}
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var LayeringRule = (function (_super) {
__extends(LayeringRule, _super);
function LayeringRule(file, config, opts) {
var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
}
LayeringRule.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
var parts = path_1.dirname(path).split(/\\|\//);
for (var i = parts.length - 1; i >= 0; i--) {
var part = parts[i];
if (this._config.allowed.has(part)) {
// GOOD - same layer
return;
}
if (this._config.disallowed.has(part)) {
// BAD - wrong layer
var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return;
}
}
};
LayeringRule._print = function (set) {
var r = [];
set.forEach(function (e) { return r.push(e); });
return r.join(', ');
};
return LayeringRule;
}(Lint.RuleWalker));
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var path_1 = require("path");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
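        // ruleArgs maps a layer name (a directory segment) to the set of layers it may depend on;
        // walk the file's directory parts innermost-first to find the closest applicable config.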
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
}
}
if (!config) {
return [];
}
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var LayeringRule = (function (_super) {
__extends(LayeringRule, _super);
function LayeringRule(file, config, opts) {
var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
}
LayeringRule.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
};
LayeringRule.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
};
LayeringRule.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
};
LayeringRule.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
var parts = path_1.dirname(path).split(/\\|\//);
for (var i = parts.length - 1; i >= 0; i--) {
var part = parts[i];
if (this._config.allowed.has(part)) {
// GOOD - same layer
return;
}
if (this._config.disallowed.has(part)) {
// BAD - wrong layer
var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return;
}
}
};
LayeringRule._print = function (set) {
var r = [];
set.forEach(function (e) { return r.push(e); });
return r.join(', ');
};
return LayeringRule;
}(Lint.RuleWalker));

View file

@ -51,9 +51,27 @@ class LayeringRule extends Lint.RuleWalker {
this._config = config;
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
let path = node.moduleSpecifier.getText();
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);

View file

@ -1,171 +1,182 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var ts = require("typescript");
var Lint = require("tslint");
/**
* Implementation of the no-unexternalized-strings rule.
*/
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
function isStringLiteral(node) {
return node && node.kind === ts.SyntaxKind.StringLiteral;
}
function isObjectLiteral(node) {
return node && node.kind === ts.SyntaxKind.ObjectLiteralExpression;
}
function isPropertyAssignment(node) {
return node && node.kind === ts.SyntaxKind.PropertyAssignment;
}
var NoUnexternalizedStringsRuleWalker = (function (_super) {
__extends(NoUnexternalizedStringsRuleWalker, _super);
function NoUnexternalizedStringsRuleWalker(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.signatures = Object.create(null);
_this.ignores = Object.create(null);
_this.messageIndex = undefined;
_this.keyIndex = undefined;
_this.usedKeys = Object.create(null);
var options = _this.getOptions();
var first = options && options.length > 0 ? options[0] : null;
if (first) {
if (Array.isArray(first.signatures)) {
first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
}
if (Array.isArray(first.ignores)) {
first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
}
if (typeof first.messageIndex !== 'undefined') {
_this.messageIndex = first.messageIndex;
}
if (typeof first.keyIndex !== 'undefined') {
_this.keyIndex = first.keyIndex;
}
}
return _this;
}
NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
var _this = this;
_super.prototype.visitSourceFile.call(this, node);
Object.keys(this.usedKeys).forEach(function (key) {
var occurences = _this.usedKeys[key];
if (occurences.length > 1) {
occurences.forEach(function (occurence) {
_this.addFailure((_this.createFailure(occurence.key.getStart(), occurence.key.getWidth(), "Duplicate key " + occurence.key.getText() + " with different message value.")));
});
}
});
};
NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
this.checkStringLiteral(node);
_super.prototype.visitStringLiteral.call(this, node);
};
NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
var text = node.getText();
var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.ignoreUsage) {
return;
}
var callInfo = info ? info.callInfo : null;
var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) {
return;
}
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
var s_1 = node.getText();
var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s_1.substring(1, s_1.length - 1) + "', " + s_1 + ")");
var fix = new Lint.Fix("Unexternalitzed string", [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return;
}
// We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName]) {
return;
}
// We have a string that is a direct argument into the localize call.
var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
else if (isObjectLiteral(keyArg)) {
for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
var name_1 = property.name.getText();
if (name_1 === 'key') {
var initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
}
}
}
}
var messageArg = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return;
}
};
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText();
var occurences = this.usedKeys[text];
if (!occurences) {
occurences = [];
this.usedKeys[text] = occurences;
}
if (messageNode) {
if (occurences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return;
}
}
occurences.push({ key: keyNode, message: messageNode });
};
NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
var parent;
while ((parent = node.parent)) {
var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
}
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { ignoreUsage: true };
}
else if (kind === ts.SyntaxKind.VariableDeclaration || kind === ts.SyntaxKind.FunctionDeclaration || kind === ts.SyntaxKind.PropertyDeclaration
|| kind === ts.SyntaxKind.MethodDeclaration || kind === ts.SyntaxKind.VariableDeclarationList || kind === ts.SyntaxKind.InterfaceDeclaration
|| kind === ts.SyntaxKind.ClassDeclaration || kind === ts.SyntaxKind.EnumDeclaration || kind === ts.SyntaxKind.ModuleDeclaration
|| kind === ts.SyntaxKind.TypeAliasDeclaration || kind === ts.SyntaxKind.SourceFile) {
return null;
}
node = parent;
}
};
return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker));
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
/**
* Implementation of the no-unexternalized-strings rule.
*/
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
function isStringLiteral(node) {
return node && node.kind === ts.SyntaxKind.StringLiteral;
}
function isObjectLiteral(node) {
return node && node.kind === ts.SyntaxKind.ObjectLiteralExpression;
}
function isPropertyAssignment(node) {
return node && node.kind === ts.SyntaxKind.PropertyAssignment;
}
var NoUnexternalizedStringsRuleWalker = (function (_super) {
__extends(NoUnexternalizedStringsRuleWalker, _super);
function NoUnexternalizedStringsRuleWalker(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.signatures = Object.create(null);
_this.ignores = Object.create(null);
_this.messageIndex = undefined;
_this.keyIndex = undefined;
_this.usedKeys = Object.create(null);
var options = _this.getOptions();
var first = options && options.length > 0 ? options[0] : null;
if (first) {
if (Array.isArray(first.signatures)) {
first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
}
if (Array.isArray(first.ignores)) {
first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
}
if (typeof first.messageIndex !== 'undefined') {
_this.messageIndex = first.messageIndex;
}
if (typeof first.keyIndex !== 'undefined') {
_this.keyIndex = first.keyIndex;
}
}
return _this;
}
NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
var _this = this;
_super.prototype.visitSourceFile.call(this, node);
Object.keys(this.usedKeys).forEach(function (key) {
var occurrences = _this.usedKeys[key];
if (occurrences.length > 1) {
occurrences.forEach(function (occurrence) {
_this.addFailure((_this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), "Duplicate key " + occurrence.key.getText() + " with different message value.")));
});
}
});
};
NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
this.checkStringLiteral(node);
_super.prototype.visitStringLiteral.call(this, node);
};
NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
var text = node.getText();
var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
]));
return;
}
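        // Double-quoted strings inside import/export statements are not externalization
        // candidates; the fix above simply rewrites them to use single quotes.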
var callInfo = info ? info.callInfo : null;
var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) {
return;
}
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
var s = node.getText();
var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")");
            var fix = new Lint.Fix('Unexternalized string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return;
}
// We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName]) {
return;
}
// We have a string that is a direct argument into the localize call.
var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
else if (isObjectLiteral(keyArg)) {
for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
var name_1 = property.name.getText();
if (name_1 === 'key') {
var initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
}
}
}
}
var messageArg = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return;
}
};
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText();
var occurrences = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
this.usedKeys[text] = occurrences;
}
if (messageNode) {
if (occurrences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return;
}
}
occurrences.push({ key: keyNode, message: messageNode });
};
NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
var parent;
while ((parent = node.parent)) {
var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
}
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { isImport: true };
}
else if (kind === ts.SyntaxKind.VariableDeclaration || kind === ts.SyntaxKind.FunctionDeclaration || kind === ts.SyntaxKind.PropertyDeclaration
|| kind === ts.SyntaxKind.MethodDeclaration || kind === ts.SyntaxKind.VariableDeclarationList || kind === ts.SyntaxKind.InterfaceDeclaration
|| kind === ts.SyntaxKind.ClassDeclaration || kind === ts.SyntaxKind.EnumDeclaration || kind === ts.SyntaxKind.ModuleDeclaration
|| kind === ts.SyntaxKind.TypeAliasDeclaration || kind === ts.SyntaxKind.SourceFile) {
return null;
}
node = parent;
}
};
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker));
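For orientation only, not part of this commit: assuming the rule is configured with nls.localize in signatures, keyIndex 0 and messageIndex 1 (the option shape read by getOptions above), a minimal sketch of what the walker flags and accepts:

// Hypothetical example, not from this repository
import * as nls from 'vs/nls';
import fs = require('fs'); // import strings must use single quotes, or the rule offers a quote-replacement fix
const bad = "Save file"; // flagged: double-quoted string outside a known localize signature
const good = nls.localize('saveFile', "Save file"); // accepted: single-quoted key, double-quoted message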

View file

@ -45,6 +45,8 @@ interface KeyMessagePair {
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static ImportFailureMessage = 'Do not use double quotes for imports.';
private static DOUBLE_QUOTE: string = '"';
private signatures: Map<boolean>;
@ -82,10 +84,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
protected visitSourceFile(node: ts.SourceFile): void {
super.visitSourceFile(node);
Object.keys(this.usedKeys).forEach(key => {
let occurences = this.usedKeys[key];
if (occurences.length > 1) {
occurences.forEach(occurence => {
this.addFailure((this.createFailure(occurence.key.getStart(), occurence.key.getWidth(), `Duplicate key ${occurence.key.getText()} with different message value.`)));
let occurrences = this.usedKeys[key];
if (occurrences.length > 1) {
occurrences.forEach(occurrence => {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`)));
});
}
});
@ -101,7 +103,15 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
let doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.ignoreUsage) {
if (info && info.isImport && doubleQuoted) {
this.addFailureAtNode(
node,
NoUnexternalizedStringsRuleWalker.ImportFailureMessage,
new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
])
);
return;
}
let callInfo = info ? info.callInfo : null;
@ -113,7 +123,7 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText();
const replacement = new Lint.Replacement(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`);
const fix = new Lint.Fix("Unexternalized string", [replacement]);
const fix = new Lint.Fix('Unexternalized string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
return;
}
@ -157,20 +167,20 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
let text = keyNode.getText();
let occurences: KeyMessagePair[] = this.usedKeys[text];
if (!occurences) {
occurences = [];
this.usedKeys[text] = occurences;
let occurrences: KeyMessagePair[] = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
this.usedKeys[text] = occurrences;
}
if (messageNode) {
if (occurences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) {
if (occurrences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) {
return;
}
}
occurences.push({ key: keyNode, message: messageNode });
occurrences.push({ key: keyNode, message: messageNode });
}
private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, ignoreUsage?: boolean; } {
private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } {
let parent: ts.Node;
while ((parent = node.parent)) {
let kind = parent.kind;
@ -178,7 +188,7 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
let callExpression = parent as ts.CallExpression;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(<any>node) } };
} else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { ignoreUsage: true };
return { isImport: true };
} else if (kind === ts.SyntaxKind.VariableDeclaration || kind === ts.SyntaxKind.FunctionDeclaration || kind === ts.SyntaxKind.PropertyDeclaration
|| kind === ts.SyntaxKind.MethodDeclaration || kind === ts.SyntaxKind.VariableDeclarationList || kind === ts.SyntaxKind.InterfaceDeclaration
|| kind === ts.SyntaxKind.ClassDeclaration || kind === ts.SyntaxKind.EnumDeclaration || kind === ts.SyntaxKind.ModuleDeclaration
@ -188,4 +198,4 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
node = parent;
}
}
}
}

View file

@ -0,0 +1,79 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var Lint = require("tslint");
var fs = require("fs");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var TranslationRemindRuleWalker = (function (_super) {
__extends(TranslationRemindRuleWalker, _super);
function TranslationRemindRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
}
TranslationRemindRuleWalker.prototype.visitImportDeclaration = function (node) {
var declaration = node.moduleSpecifier.getText();
if (declaration !== "'" + TranslationRemindRuleWalker.NLS_MODULE + "'") {
return;
}
this.visitImportLikeDeclaration(node);
};
TranslationRemindRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
var reference = node.moduleReference.getText();
if (reference !== "require('" + TranslationRemindRuleWalker.NLS_MODULE + "')") {
return;
}
this.visitImportLikeDeclaration(node);
};
TranslationRemindRuleWalker.prototype.visitImportLikeDeclaration = function (node) {
var currentFile = node.getSourceFile().fileName;
var matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
var matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) {
return;
}
var resource = matchService ? matchService[0] : matchPart[0];
var resourceDefined = false;
var json;
try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
}
catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return;
}
var workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach(function (existingResource) {
if (existingResource.name === resource) {
resourceDefined = true;
return;
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, "Please add '" + resource + "' to the ./build/lib/i18n.resources.json file to use translations here.");
}
};
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';
return TranslationRemindRuleWalker;
}(Lint.RuleWalker));

View file

@ -0,0 +1,73 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as fs from 'fs';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
}
}
class TranslationRemindRuleWalker extends Lint.RuleWalker {
private static NLS_MODULE: string = 'vs/nls';
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
const declaration = node.moduleSpecifier.getText();
if (declaration !== `'${TranslationRemindRuleWalker.NLS_MODULE}'`) {
return;
}
this.visitImportLikeDeclaration(node);
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
const reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) {
return;
}
this.visitImportLikeDeclaration(node);
}
private visitImportLikeDeclaration(node: ts.ImportDeclaration | ts.ImportEqualsDeclaration) {
const currentFile = node.getSourceFile().fileName;
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
const matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) {
return;
}
const resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false;
let json;
try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
} catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return;
}
const workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach(existingResource => {
if (existingResource.name === resource) {
resourceDefined = true;
return;
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to the ./build/lib/i18n.resources.json file to use translations here.`);
}
}
}
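For reference, and not part of this commit: the walker only compares the name field of each entry under workbench in build/lib/i18n.resources.json, so satisfying the rule for a new part or service amounts to adding an entry along these lines (the part name and any fields beyond name are assumptions):

// Hypothetical excerpt of build/lib/i18n.resources.json
// {
//     "workbench": [
//         { "name": "vs/workbench/parts/myNewPart" }
//     ]
// }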

View file

@ -1,8 +0,0 @@
{
"compilerOptions": {
"target": "es5",
"lib": [
"es2015"
]
}
}

View file

@ -1,70 +0,0 @@
// ATTENTION - THIS DIRECTORY CONTAINS THIRD PARTY OPEN SOURCE MATERIALS:
[
{
"name": "typescript-legacy",
"version": "1.5",
"license": "Apache2",
"repositoryURL": "https://github.com/Microsoft/TypeScript",
// Reason: LICENSE file does not include Copyright statement
"licenseDetail": [
"Copyright (c) Microsoft Corporation. All rights reserved. ",
"",
"Apache License",
"",
"Version 2.0, January 2004",
"",
"http://www.apache.org/licenses/",
"",
"TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION",
"",
"1. Definitions.",
"",
"\"License\" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.",
"",
"\"Licensor\" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.",
"",
"\"Legal Entity\" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, \"control\" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.",
"",
"\"You\" (or \"Your\") shall mean an individual or Legal Entity exercising permissions granted by this License.",
"",
"\"Source\" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.",
"",
"\"Object\" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.",
"",
"\"Work\" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).",
"",
"\"Derivative Works\" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.",
"",
"\"Contribution\" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, \"submitted\" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as \"Not a Contribution.\"",
"",
"\"Contributor\" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.",
"",
"2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.",
"",
"3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.",
"",
"4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:",
"",
"You must give any other recipients of the Work or Derivative Works a copy of this License; and",
"",
"You must cause any modified files to carry prominent notices stating that You changed the files; and",
"",
"You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and",
"",
"If the Work includes a \"NOTICE\" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.",
"",
"5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.",
"",
"6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.",
"",
"7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.",
"",
"8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.",
"",
"9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.",
"",
"END OF TERMS AND CONDITIONS"
],
"isDev": true
}
]

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View file

@ -1,6 +1,7 @@
declare module "event-stream" {
import { Stream } from 'stream';
import { ThroughStream } from 'through';
import { MapStream } from 'map-stream';
function merge(streams: Stream[]): ThroughStream;
function merge(...streams: Stream[]): ThroughStream;
@ -16,4 +17,5 @@ declare module "event-stream" {
function mapSync<I,O>(cb: (data:I) => O): ThroughStream;
function map<I,O>(cb: (data:I, cb:(err?:Error, data?: O)=>void) => O): ThroughStream;
function readable(asyncFunction: Function): MapStream;
}
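As an aside, illustrative and not from this commit: the readable declaration added here corresponds to event-stream's helper that wraps an async producer function into a readable stream, used roughly like this:

var es = require('event-stream');
var stream = es.readable(function (count, callback) {
    this.emit('data', 'chunk ' + count); // emit one chunk per call
    if (count === 2) { this.emit('end'); } // stop after three chunks
    callback();
});
stream.pipe(process.stdout);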

View file

@ -1,45 +0,0 @@
// Type definitions for gulp-uglify
// Project: https://github.com/terinjokes/gulp-uglify
// Definitions by: Christopher Haws <https://github.com/ChristopherHaws/>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "gulp-uglify" {
import * as UglifyJS from 'uglify-js';
namespace GulpUglify {
interface Options {
/**
* Pass false to skip mangling names.
*/
mangle?: boolean;
/**
* Pass if you wish to specify additional output options. The defaults are optimized for best compression.
*/
output?: UglifyJS.BeautifierOptions;
/**
* Pass an object to specify custom compressor options. Pass false to skip compression completely.
*/
compress?: UglifyJS.CompressorOptions | boolean;
/**
* A convenience option for options.output.comments. Defaults to preserving no comments.
* all - Preserve all comments in code blocks
* some - Preserve comments that start with a bang (!) or include a Closure Compiler directive (@preserve, @license, @cc_on)
* function - Specify your own comment preservation function. You will be passed the current node and the current comment and are expected to return either true or false.
*/
preserveComments?: string|((node: any, comment: UglifyJS.Tokenizer) => boolean);
}
class GulpUglifyError {
cause: {
filename: string;
};
}
}
function GulpUglify(options?: GulpUglify.Options): NodeJS.ReadWriteStream;
export = GulpUglify;
}
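Illustrative usage only, since this typings file is being deleted in this commit: a gulpfile written against the options above would look roughly like the following.

var gulp = require('gulp');
var uglify = require('gulp-uglify');

gulp.task('minify', function () {
    return gulp.src('out/**/*.js')
        .pipe(uglify({ mangle: true, preserveComments: 'some' })) // keep bang/license comments
        .pipe(gulp.dest('out-min'));
});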

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,428 +0,0 @@
// Type definitions for UglifyJS 2 v2.6.1
// Project: https://github.com/mishoo/UglifyJS2
// Definitions by: Tanguy Krotoff <https://github.com/tkrotoff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module 'uglify-js' {
import * as MOZ_SourceMap from 'source-map';
namespace UglifyJS {
interface Tokenizer {
/**
* The type of this token.
* Can be "num", "string", "regexp", "operator", "punc", "atom", "name", "keyword", "comment1" or "comment2".
* "comment1" and "comment2" are for single-line and multi-line comments, respectively.
*/
type: string;
/**
* The name of the file where this token originated from. Useful when compressing multiple files at once to generate the proper source map.
*/
file: string;
/**
* The "value" of the token.
* That's additional information and depends on the token type:
* - For "num", "string" and "regexp" tokens you get their literal value.
* - For "operator" you get the operator.
* - For "punc" it's the punctuation sign (parens, comma, semicolon etc).
* - For "atom", "name" and "keyword" it's the name of the identifier.
* - For comments it's the body of the comment (excluding the initial "//" and "/*").
*/
value: string;
/**
* The line number of this token in the original code.
* 1-based index.
*/
line: number;
/**
* The column number of this token in the original code.
* 0-based index.
*/
col: number;
/**
* Short for "newline before", it's a boolean that tells us whether there was a newline before this node in the original source. It helps for automatic semicolon insertion.
* For multi-line comments in particular this will be set to true if there either was a newline before this comment, or if this comment contains a newline.
*/
nlb: boolean;
/**
* This doesn't apply for comment tokens, but for all other token types it will be an array of comment tokens that were found before.
*/
comments_before: string[];
}
interface AST_Node {
// The first token of this node
start: AST_Node;
// The last token of this node
end: AST_Node;
transform(tt: TreeTransformer): AST_Toplevel;
}
interface AST_Toplevel extends AST_Node {
// UglifyJS contains a scope analyzer which figures out variable/function definitions, references etc.
// You need to call it manually before compression or mangling.
// The figure_out_scope method is defined only on the AST_Toplevel node.
figure_out_scope(): void;
// Get names that are optimized for GZip compression (names will be generated using the most frequent characters first)
compute_char_frequency(): void;
mangle_names(): void;
print(stream: OutputStream): void;
print_to_string(options?: BeautifierOptions): string;
}
interface MinifyOptions {
spidermonkey?: boolean;
outSourceMap?: string;
sourceRoot?: string;
inSourceMap?: string;
fromString?: boolean;
warnings?: boolean;
mangle?: Object;
output?: MinifyOutput,
compress?: Object;
}
interface MinifyOutput {
code: string;
map: string;
}
function minify(files: string | Array<string>, options?: MinifyOptions): MinifyOutput;
interface ParseOptions {
// Default is false
strict?: boolean;
// Input file name, default is null
filename?: string;
// Default is null
toplevel?: AST_Toplevel;
}
/**
* The parser creates a custom abstract syntax tree given a piece of JavaScript code.
* Perhaps you should read about the AST first.
*/
function parse(code: string, options?: ParseOptions): AST_Toplevel;
interface BeautifierOptions {
/**
* Start indentation on every line (only when `beautify`)
*/
indent_start?: number;
/**
* Indentation level (only when `beautify`)
*/
indent_level?: number;
/**
* Quote all keys in object literals?
*/
quote_keys?: boolean;
/**
* Add a space after colon signs?
*/
space_colon?: boolean;
/**
* Output ASCII-safe? (encodes Unicode characters as ASCII)
*/
ascii_only?: boolean;
/**
* Escape "</script"?
*/
inline_script?: boolean;
/**
* Informative maximum line width (for beautified output)
*/
width?: number;
/**
* Maximum line length (for non-beautified output)
*/
max_line_len?: number;
/**
* Output IE-safe code?
*/
ie_proof?: boolean;
/**
* Beautify output?
*/
beautify?: boolean;
/**
* Output a source map
*/
source_map?: SourceMapOptions;
/**
* Use brackets every time?
*/
bracketize?: boolean;
/**
* Output comments?
*/
comments?: boolean;
/**
* Use semicolons to separate statements? (otherwise, newlines)
*/
semicolons?: boolean;
}
interface OutputStream {
// Return the output so far as a string
get(): string;
toString(): string;
// Insert one indentation string (usually 4 characters).
// Optionally pass true to indent half the width (I'm using that for case and default lines in switch blocks).
// If beautify is off, this function does nothing.
indent(half?: boolean): void;
// Return the current indentation width (not level); for example, if we're in level 2 and indent_level is 4, this method would return 8.
indentation(): number;
// Return the width of the current line text minus indentation.
current_width(): number;
// Return true if current_width() is bigger than options.width (assuming options.width is non-null, non-zero).
should_break(): boolean;
// If beautification is on, this inserts a newline. Otherwise it does nothing.
newline(): void;
// Include the given string into the output, adjusting current_line, current_col and current_pos accordingly.
print(str: string): void;
// If beautification is on this always includes a space character.
// Otherwise it saves a hint somewhere that a space might be needed at current point.
// The space will go in at the next output but only when absolutely required, for example it will insert the space in return 10 but not in return"stuff".
space(): void;
// Inserts a comma, and calls space() — that is, if beautification is on you'll get a space after the comma.
comma(): void;
// Inserts a colon, and calls space() if options.space_colon is set.
colon(): void;
// Returns the last printed chunk.
last(): string;
// If beautification is on it always inserts a semicolon.
// Otherwise it saves a hint that a semicolon might be needed at current point.
// The semicolon is inserted when the next output comes in, only if required to not break the JS syntax.
semicolon(): void;
// Always inserts a semicolon and clears the hint that a semicolon might be needed.
force_semicolon(): void;
// Encodes any non-ASCII characters in string with JavaScript's conventions (using \uCODE).
to_ascii(str: string): void;
// Prints an identifier. If options.ascii_only is set, non-ASCII chars will be encoded with JavaScript conventions.
print_name(name: string): void;
// Prints a string. It adds quotes automatically.
// It prefers double-quotes, but will actually count any quotes in the string and will use single-quotes if the output proves to be shorter (depending on how many backslashes it has to insert).
// It encodes to ASCII if options.ascii_only is set.
print_string(str: string): void;
// Returns the width of the next indentation level. For example if current level is 2 and options.indent_level is 4, it'll return 12.
next_indent(): number;
// Sets the current indentation to col (column), calls the function and thereafter restores the previous indentation level.
// If beautification is off it simply calls func.
with_indent(col: number, func: Function): void;
// This is used to output blocks in curly brackets.
// It'll print an open bracket at current point, then call newline() and with the next indentation level it calls your func.
// Lastly, it'll print an indented closing bracket. As usual, if beautification is off you'll just get {x} where x is whatever func outputs.
with_block(func: Function): void;
// Adds parens around the output that your function prints.
with_parens(func: Function): void;
// Adds square brackets around the output that your function prints.
with_square(func: Function): void;
// If options.source_map is set, this will generate a source mapping between the given token (which should be an AST_Token-like object) and the current line/col.
// The name is optional; in most cases it will be inferred from the token.
add_mapping(token: AST_Node, name?: string): void;
// Returns the option with the given name.
option(name: string): any;
// Returns the current line in the output (1-based).
line(): number;
// Returns the current column in the output (zero-based).
col(): number;
// Push the given node into an internal stack. This is used to keep track of current node's parent(s).
push_node(node: AST_Node): void;
// Pops the top of the stack and returns it.
pop_node(): AST_Node;
// Returns that internal stack.
stack(): any;
// Returns the n-th parent node (where zero means the direct parent).
parent(n: number): AST_Node;
}
/**
* The code generator is a recursive process of getting back source code from an AST returned by the parser.
* Every AST node has a print method that takes an OutputStream and dumps the code from that node into it.
* The stream object supports a lot of options that control the output.
* You can specify whether you'd like to get human-readable (indented) output, the indentation level, whether you'd like to quote all properties in object literals etc.
*/
function OutputStream(options?: BeautifierOptions): OutputStream;
interface SourceMapOptions {
/**
* The compressed file name
*/
file?: string;
/**
* The root URL to the original sources
*/
root?: string;
/**
* The input source map.
* Useful when you compress code that was generated from some other source (possibly other programming language).
* If you have an input source map, pass it in this argument and UglifyJS will generate a mapping that maps back
* to the original source (as opposed to the compiled code that you are compressing).
*/
orig?: Object | JSON;
}
interface SourceMap {
add(source: string, gen_line: number, gen_col: number, orig_line: number, orig_col: number, name?: string): void;
get(): MOZ_SourceMap.SourceMapGenerator;
toString(): string;
}
/**
* The output stream keeps track of the current line/column in the output and can trivially generate a source mapping to the original code via Mozilla's source-map library.
* To use this functionality, you must load this library (it's automatically require-d by UglifyJS in the NodeJS version, but in a browser you must load it yourself)
* and make it available via the global MOZ_SourceMap variable.
*/
function SourceMap(options?: SourceMapOptions): SourceMap;
interface CompressorOptions {
// Join consecutive statements with the “comma operator”
sequences?: boolean;
// Optimize property access: a["foo"] → a.foo
properties?: boolean;
// Discard unreachable code
dead_code?: boolean;
// Discard “debugger” statements
drop_debugger?: boolean;
// Some unsafe optimizations (see below)
unsafe?: boolean;
// Optimize if-s and conditional expressions
conditionals?: boolean;
// Optimize comparisons
comparisons?: boolean;
// Evaluate constant expressions
evaluate?: boolean;
// Optimize boolean expressions
booleans?: boolean;
// Optimize loops
loops?: boolean;
// Drop unused variables/functions
unused?: boolean;
// Hoist function declarations
hoist_funs?: boolean;
// Hoist variable declarations
hoist_vars?: boolean;
// Optimize if-s followed by return/continue
if_return?: boolean;
// Join var declarations
join_vars?: boolean;
// Try to cascade `right` into `left` in sequences
cascade?: boolean;
// Drop side-effect-free statements
side_effects?: boolean;
// Warn about potentially dangerous optimizations/code
warnings?: boolean;
// Global definitions
global_defs?: Object;
}
/**
* The compressor is a tree transformer which reduces the code size by applying various optimizations on the AST
*/
function Compressor(options?: CompressorOptions): AST_Toplevel;
// TODO
interface TreeWalker {
}
type visitor = (node: AST_Node, descend: Function) => boolean;
/**
* UglifyJS provides a TreeWalker object and every node has a walk method that given a walker will apply your visitor to each node in the tree.
* Your visitor can return a non-falsy value in order to prevent descending the current node.
*/
function TreeWalker(visitor: visitor): TreeWalker;
// TODO
interface TreeTransformer extends TreeWalker {
}
/**
* The tree transformer is a special case of a tree walker.
* In fact it even inherits from TreeWalker and you can use the same methods, but initialization and visitor protocol are a bit different.
*/
function TreeTransformer(before: visitor, after: visitor): TreeTransformer;
}
export = UglifyJS;
}
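Also for orientation only, as these declarations are being removed as well: the API they describe is the classic UglifyJS 2 flow, either the one-shot minify or the parse / Compressor / mangle_names pipeline, roughly:

var UglifyJS = require('uglify-js');

// One-shot: minify files from disk and emit a source map.
var result = UglifyJS.minify(['out/app.js'], { outSourceMap: 'app.js.map' });
console.log(result.code.length, 'bytes of minified code');

// Step by step: parse, analyse scope, compress, mangle, print.
var ast = UglifyJS.parse('function add(a, b) { return a + b; } add(1, 2);');
ast.figure_out_scope();
var compressed = ast.transform(UglifyJS.Compressor({ dead_code: true, unused: true }));
compressed.figure_out_scope();
compressed.compute_char_frequency();
compressed.mangle_names();
console.log(compressed.print_to_string());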

View file

@ -1,210 +1,213 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var es = require("event-stream");
var debounce = require("debounce");
var _filter = require("gulp-filter");
var rename = require("gulp-rename");
var _ = require("underscore");
var path = require("path");
var fs = require("fs");
var _rimraf = require("rimraf");
var git = require("./git");
var VinylFile = require("vinyl");
var NoCancellationToken = { isCancellationRequested: function () { return false; } };
function incremental(streamProvider, initial, supportsCancellation) {
var input = es.through();
var output = es.through();
var state = 'idle';
var buffer = Object.create(null);
var token = !supportsCancellation ? null : { isCancellationRequested: function () { return Object.keys(buffer).length > 0; } };
var run = function (input, isCancellable) {
state = 'running';
var stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
input
.pipe(stream)
.pipe(es.through(null, function () {
state = 'idle';
eventuallyRun();
}))
.pipe(output);
};
if (initial) {
run(initial, false);
}
var eventuallyRun = debounce(function () {
var paths = Object.keys(buffer);
if (paths.length === 0) {
return;
}
var data = paths.map(function (path) { return buffer[path]; });
buffer = Object.create(null);
run(es.readArray(data), true);
}, 500);
input.on('data', function (f) {
buffer[f.path] = f;
if (state === 'idle') {
eventuallyRun();
}
});
return es.duplex(input, output);
}
exports.incremental = incremental;
function fixWin32DirectoryPermissions() {
if (!/win32/.test(process.platform)) {
return es.through();
}
return es.mapSync(function (f) {
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
f.stat.mode = 16877;
}
return f;
});
}
exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions;
function setExecutableBit(pattern) {
var setBit = es.mapSync(function (f) {
f.stat.mode = 33261;
return f;
});
if (!pattern) {
return setBit;
}
var input = es.through();
var filter = _filter(pattern, { restore: true });
var output = input
.pipe(filter)
.pipe(setBit)
.pipe(filter.restore);
return es.duplex(input, output);
}
exports.setExecutableBit = setExecutableBit;
function toFileUri(filePath) {
var match = filePath.match(/^([a-z])\:(.*)$/i);
if (match) {
filePath = '/' + match[1].toUpperCase() + ':' + match[2];
}
return 'file://' + filePath.replace(/\\/g, '/');
}
exports.toFileUri = toFileUri;
function skipDirectories() {
return es.mapSync(function (f) {
if (!f.isDirectory()) {
return f;
}
});
}
exports.skipDirectories = skipDirectories;
function cleanNodeModule(name, excludes, includes) {
var toGlob = function (path) { return '**/node_modules/' + name + (path ? '/' + path : ''); };
var negate = function (str) { return '!' + str; };
var allFilter = _filter(toGlob('**'), { restore: true });
var globs = [toGlob('**')].concat(excludes.map(_.compose(negate, toGlob)));
var input = es.through();
var nodeModuleInput = input.pipe(allFilter);
var output = nodeModuleInput.pipe(_filter(globs));
if (includes) {
var includeGlobs = includes.map(toGlob);
output = es.merge(output, nodeModuleInput.pipe(_filter(includeGlobs)));
}
output = output.pipe(allFilter.restore);
return es.duplex(input, output);
}
exports.cleanNodeModule = cleanNodeModule;
function loadSourcemaps() {
var input = es.through();
var output = input
.pipe(es.map(function (f, cb) {
if (f.sourceMap) {
cb(null, f);
return;
}
if (!f.contents) {
cb(new Error('empty file'));
return;
}
var contents = f.contents.toString('utf8');
var reg = /\/\/# sourceMappingURL=(.*)$/g;
var lastMatch = null, match = null;
while (match = reg.exec(contents)) {
lastMatch = match;
}
if (!lastMatch) {
f.sourceMap = {
version: 3,
names: [],
mappings: '',
sources: [f.relative.replace(/\\/g, '/')],
sourcesContent: [contents]
};
cb(null, f);
return;
}
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) {
return cb(err);
}
f.sourceMap = JSON.parse(contents);
cb(null, f);
});
}));
return es.duplex(input, output);
}
exports.loadSourcemaps = loadSourcemaps;
function stripSourceMappingURL() {
var input = es.through();
var output = input
.pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) {
var retries = 0;
var retry = function (cb) {
_rimraf(dir, { maxBusyTries: 1 }, function (err) {
if (!err)
return cb();
if (err.code === 'ENOTEMPTY' && ++retries < 5)
return setTimeout(function () { return retry(cb); }, 10);
else
return cb(err);
});
};
return function (cb) { return retry(cb); };
}
exports.rimraf = rimraf;
function getVersion(root) {
var version = process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
version = git.getVersion(root);
}
return version;
}
exports.getVersion = getVersion;
function rebase(count) {
return rename(function (f) {
var parts = f.dirname.split(/[\/\\]/);
f.dirname = parts.slice(count).join(path.sep);
});
}
exports.rebase = rebase;
function filter(fn) {
var result = es.through(function (data) {
if (fn(data)) {
this.emit('data', data);
}
else {
result.restore.push(data);
}
});
result.restore = es.through();
return result;
}
exports.filter = filter;
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var debounce = require("debounce");
var _filter = require("gulp-filter");
var rename = require("gulp-rename");
var _ = require("underscore");
var path = require("path");
var fs = require("fs");
var _rimraf = require("rimraf");
var git = require("./git");
var VinylFile = require("vinyl");
var NoCancellationToken = { isCancellationRequested: function () { return false; } };
function incremental(streamProvider, initial, supportsCancellation) {
var input = es.through();
var output = es.through();
var state = 'idle';
var buffer = Object.create(null);
var token = !supportsCancellation ? null : { isCancellationRequested: function () { return Object.keys(buffer).length > 0; } };
var run = function (input, isCancellable) {
state = 'running';
var stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
input
.pipe(stream)
.pipe(es.through(null, function () {
state = 'idle';
eventuallyRun();
}))
.pipe(output);
};
if (initial) {
run(initial, false);
}
var eventuallyRun = debounce(function () {
var paths = Object.keys(buffer);
if (paths.length === 0) {
return;
}
var data = paths.map(function (path) { return buffer[path]; });
buffer = Object.create(null);
run(es.readArray(data), true);
}, 500);
input.on('data', function (f) {
buffer[f.path] = f;
if (state === 'idle') {
eventuallyRun();
}
});
return es.duplex(input, output);
}
exports.incremental = incremental;
function fixWin32DirectoryPermissions() {
if (!/win32/.test(process.platform)) {
return es.through();
}
return es.mapSync(function (f) {
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
f.stat.mode = 16877;
}
return f;
});
}
exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions;
function setExecutableBit(pattern) {
var setBit = es.mapSync(function (f) {
f.stat.mode = 33261;
return f;
});
if (!pattern) {
return setBit;
}
var input = es.through();
var filter = _filter(pattern, { restore: true });
var output = input
.pipe(filter)
.pipe(setBit)
.pipe(filter.restore);
return es.duplex(input, output);
}
exports.setExecutableBit = setExecutableBit;
function toFileUri(filePath) {
var match = filePath.match(/^([a-z])\:(.*)$/i);
if (match) {
filePath = '/' + match[1].toUpperCase() + ':' + match[2];
}
return 'file://' + filePath.replace(/\\/g, '/');
}
exports.toFileUri = toFileUri;
function skipDirectories() {
return es.mapSync(function (f) {
if (!f.isDirectory()) {
return f;
}
});
}
exports.skipDirectories = skipDirectories;
function cleanNodeModule(name, excludes, includes) {
var toGlob = function (path) { return '**/node_modules/' + name + (path ? '/' + path : ''); };
var negate = function (str) { return '!' + str; };
var allFilter = _filter(toGlob('**'), { restore: true });
var globs = [toGlob('**')].concat(excludes.map(_.compose(negate, toGlob)));
var input = es.through();
var nodeModuleInput = input.pipe(allFilter);
var output = nodeModuleInput.pipe(_filter(globs));
if (includes) {
var includeGlobs = includes.map(toGlob);
output = es.merge(output, nodeModuleInput.pipe(_filter(includeGlobs)));
}
output = output.pipe(allFilter.restore);
return es.duplex(input, output);
}
exports.cleanNodeModule = cleanNodeModule;
function loadSourcemaps() {
var input = es.through();
var output = input
.pipe(es.map(function (f, cb) {
if (f.sourceMap) {
cb(null, f);
return;
}
if (!f.contents) {
cb(new Error('empty file'));
return;
}
var contents = f.contents.toString('utf8');
var reg = /\/\/# sourceMappingURL=(.*)$/g;
var lastMatch = null, match = null;
while (match = reg.exec(contents)) {
lastMatch = match;
}
if (!lastMatch) {
f.sourceMap = {
version: 3,
names: [],
mappings: '',
sources: [f.relative.replace(/\\/g, '/')],
sourcesContent: [contents]
};
cb(null, f);
return;
}
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) {
return cb(err);
}
f.sourceMap = JSON.parse(contents);
cb(null, f);
});
}));
return es.duplex(input, output);
}
exports.loadSourcemaps = loadSourcemaps;
function stripSourceMappingURL() {
var input = es.through();
var output = input
.pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) {
var retries = 0;
var retry = function (cb) {
_rimraf(dir, { maxBusyTries: 1 }, function (err) {
if (!err) {
return cb();
}
;
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(function () { return retry(cb); }, 10);
}
return cb(err);
});
};
return function (cb) { return retry(cb); };
}
exports.rimraf = rimraf;
function getVersion(root) {
var version = process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
version = git.getVersion(root);
}
return version;
}
exports.getVersion = getVersion;
function rebase(count) {
return rename(function (f) {
var parts = f.dirname.split(/[\/\\]/);
f.dirname = parts.slice(count).join(path.sep);
});
}
exports.rebase = rebase;
function filter(fn) {
var result = es.through(function (data) {
if (fn(data)) {
this.emit('data', data);
}
else {
result.restore.push(data);
}
});
result.restore = es.through();
return result;
}
exports.filter = filter;
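Usage sketch, not part of this commit: a gulpfile would typically consume a couple of the helpers exported above as follows; task and folder names here are illustrative.

var gulp = require('gulp');
var util = require('./build/lib/util');

// util.rimraf(dir) returns a callback-style function, which gulp 3 accepts directly as a task body.
gulp.task('clean-out', util.rimraf('out'));

gulp.task('copy-out', ['clean-out'], function () {
    return gulp.src('src/**/*.js')
        .pipe(util.rebase(1)) // drop the first segment of each file's relative directory
        .pipe(gulp.dest('out'));
});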

View file

@ -22,19 +22,19 @@ export interface ICancellationToken {
isCancellationRequested(): boolean;
}
const NoCancellationToken:ICancellationToken = { isCancellationRequested: () => false };
const NoCancellationToken: ICancellationToken = { isCancellationRequested: () => false };
export interface IStreamProvider {
(cancellationToken?:ICancellationToken): NodeJS.ReadWriteStream;
(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
}
export function incremental(streamProvider:IStreamProvider, initial:NodeJS.ReadWriteStream, supportsCancellation:boolean): NodeJS.ReadWriteStream {
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
const input = es.through();
const output = es.through();
let state = 'idle';
let buffer = Object.create(null);
const token:ICancellationToken = !supportsCancellation ? null : { isCancellationRequested: () => Object.keys(buffer).length > 0 };
const token: ICancellationToken = !supportsCancellation ? null : { isCancellationRequested: () => Object.keys(buffer).length > 0 };
const run = (input, isCancellable) => {
state = 'running';
@ -66,7 +66,7 @@ export function incremental(streamProvider:IStreamProvider, initial:NodeJS.ReadW
run(es.readArray(data), true);
}, 500);
input.on('data', f => {
input.on('data', (f: any) => {
buffer[f.path] = f;
if (state === 'idle') {
@ -82,7 +82,7 @@ export function fixWin32DirectoryPermissions(): NodeJS.ReadWriteStream {
return es.through();
}
return es.mapSync<VinylFile,VinylFile>(f => {
return es.mapSync<VinylFile, VinylFile>(f => {
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
f.stat.mode = 16877;
}
@ -92,7 +92,7 @@ export function fixWin32DirectoryPermissions(): NodeJS.ReadWriteStream {
}
export function setExecutableBit(pattern: string | string[]): NodeJS.ReadWriteStream {
var setBit = es.mapSync<VinylFile,VinylFile>(f => {
var setBit = es.mapSync<VinylFile, VinylFile>(f => {
f.stat.mode = /* 100755 */ 33261;
return f;
});
@ -111,7 +111,7 @@ export function setExecutableBit(pattern: string | string[]): NodeJS.ReadWriteSt
return es.duplex(input, output);
}
export function toFileUri(filePath:string): string {
export function toFileUri(filePath: string): string {
const match = filePath.match(/^([a-z])\:(.*)$/i);
if (match) {
@ -122,23 +122,23 @@ export function toFileUri(filePath:string): string {
}
export function skipDirectories(): NodeJS.ReadWriteStream {
return es.mapSync<VinylFile,VinylFile>(f => {
return es.mapSync<VinylFile, VinylFile>(f => {
if (!f.isDirectory()) {
return f;
}
});
}
export function cleanNodeModule(name:string, excludes:string[], includes:string[]): NodeJS.ReadWriteStream {
const toGlob = (path:string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str:string) => '!' + str;
export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str: string) => '!' + str;
const allFilter = _filter(toGlob('**'), { restore: true });
const globs = [toGlob('**')].concat(excludes.map(_.compose(negate, toGlob)));
const input = es.through();
const nodeModuleInput = input.pipe(allFilter);
let output:NodeJS.ReadWriteStream = nodeModuleInput.pipe(_filter(globs));
let output: NodeJS.ReadWriteStream = nodeModuleInput.pipe(_filter(globs));
if (includes) {
const includeGlobs = includes.map(toGlob);
@ -157,7 +157,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.map<FileSourceMap,FileSourceMap>((f, cb): FileSourceMap => {
.pipe(es.map<FileSourceMap, FileSourceMap>((f, cb): FileSourceMap => {
if (f.sourceMap) {
cb(null, f);
return;
@ -207,7 +207,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.mapSync<VinylFile,VinylFile>(f => {
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
@ -216,21 +216,27 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function rimraf(dir:string):(cb:any)=>void {
export function rimraf(dir: string): (cb: any) => void {
let retries = 0;
const retry = cb => {
_rimraf(dir, { maxBusyTries: 1 }, (err:any) => {
if (!err) return cb();
if (err.code === 'ENOTEMPTY' && ++retries < 5) return setTimeout(() => retry(cb), 10);
else return cb(err);
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return cb();
};
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
}
return cb(err);
});
};
return cb => retry(cb);
}
export function getVersion(root:string): string {
export function getVersion(root: string): string {
let version = process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
@ -240,7 +246,7 @@ export function getVersion(root:string): string {
return version;
}
export function rebase(count:number): NodeJS.ReadWriteStream {
export function rebase(count: number): NodeJS.ReadWriteStream {
return rename(f => {
const parts = f.dirname.split(/[\/\\]/);
f.dirname = parts.slice(count).join(path.sep);
@ -251,7 +257,7 @@ export interface FilterStream extends NodeJS.ReadWriteStream {
restore: ThroughStream;
}
export function filter(fn:(data:any)=>boolean):FilterStream {
export function filter(fn: (data: any) => boolean): FilterStream {
const result = <FilterStream><any>es.through(function (data) {
if (fn(data)) {
this.emit('data', data);

View file

@ -17,9 +17,20 @@ function handleDeletions() {
});
}
const watch = process.platform === 'win32'
? require('./watch-win32')
: require('gulp-watch');
let watch = void 0;
if (!process.env['VSCODE_USE_LEGACY_WATCH']) {
try {
watch = require('./watch-nsfw');
} catch (err) {
console.warn('Could not load our cross platform file watcher: ' + err.toString());
console.warn('Falling back to our platform specific watcher...');
}
}
if (!watch) {
watch = process.platform === 'win32' ? require('./watch-win32') : require('gulp-watch');
}
module.exports = function () {
return watch.apply(null, arguments)

View file

@ -5,6 +5,7 @@
"author": "Microsoft ",
"private": true,
"devDependencies": {
"gulp-watch": "^4.3.9"
"gulp-watch": "^4.3.9",
"nsfw": "^1.0.15"
}
}

View file

@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var nsfw = require('nsfw');
var path = require('path');
var fs = require('fs');
var File = require('vinyl');
var es = require('event-stream');
var filter = require('gulp-filter');
function toChangeType(type) {
switch (type) {
case 0: return 'add';
case 1: return 'unlink';
case 2: return 'change';
}
}
function watch(root) {
var result = es.through();
function handleEvent(path, type) {
if (/[/\\].git[/\\]/.test(path) || /[/\\]out[/\\]/.test(path)) {
return; // filter as early as possible
}
var file = new File({
path: path,
base: root
});
file.event = type;
result.emit('data', file);
}
nsfw(root, function(events) {
for (var i = 0; i < events.length; i++) {
var e = events[i];
var changeType = e.action;
if (changeType === 3 /* RENAMED */) {
handleEvent(path.join(e.directory, e.oldFile), 'unlink');
handleEvent(path.join(e.directory, e.newFile), 'add');
} else {
handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
}
}
}).then(function(watcher) {
watcher.start();
});
return result;
}
var cache = Object.create(null);
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());
var watcher = cache[cwd];
if (!watcher) {
watcher = cache[cwd] = watch(cwd);
}
var rebase = !options.base ? es.through() : es.mapSync(function(f) {
f.base = options.base;
return f;
});
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function(file, cb) {
fs.stat(file.path, function(err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function(err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
file.contents = contents;
file.stat = stat;
cb(null, file);
});
});
}))
.pipe(rebase);
};

View file

@ -1,346 +1,327 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
"use strict";
var fs = require("fs");
var ts = require("typescript");
var path = require("path");
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[monaco.d.ts]'), message].concat(rest));
}
var SRC = path.join(__dirname, '../../src');
var OUT_ROOT = path.join(__dirname, '../../');
var RECIPE_PATH = path.join(__dirname, './monaco.d.ts.recipe');
var DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
var CURRENT_PROCESSING_RULE = '';
function logErr(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log(util.colors.red('[monaco.d.ts]'), 'WHILE HANDLING RULE: ', CURRENT_PROCESSING_RULE);
util.log.apply(util, [util.colors.red('[monaco.d.ts]'), message].concat(rest));
}
function moduleIdToPath(out, moduleId) {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
var SOURCE_FILE_MAP = {};
function getSourceFile(out, moduleId) {
if (!SOURCE_FILE_MAP[moduleId]) {
var filePath = moduleIdToPath(out, moduleId);
var fileContents = void 0;
try {
fileContents = fs.readFileSync(filePath).toString();
}
catch (err) {
logErr('CANNOT FIND FILE ' + filePath);
return null;
}
var sourceFile = ts.createSourceFile(filePath, fileContents, ts.ScriptTarget.ES5);
SOURCE_FILE_MAP[moduleId] = sourceFile;
}
return SOURCE_FILE_MAP[moduleId];
}
function isDeclaration(a) {
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
}
function visitTopLevelDeclarations(sourceFile, visitor) {
var stop = false;
var visit = function (node) {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(node);
}
// if (node.kind !== ts.SyntaxKind.SourceFile) {
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('FOUND TEXT IN NODE: ' + ts.SyntaxKind[node.kind]);
// console.log(getNodeText(sourceFile, node));
// }
// }
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile) {
var all = [];
visitTopLevelDeclarations(sourceFile, function (node) {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
var interfaceDeclaration = node;
var triviaStart = interfaceDeclaration.pos;
var triviaEnd = interfaceDeclaration.name.pos;
var triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
// // let nodeText = getNodeText(sourceFile, node);
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('TRIVIA: ', triviaText);
// }
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
var nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile, typeName) {
var result = null;
visitTopLevelDeclarations(sourceFile, function (node) {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
var result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
// // console.log(ts.SyntaxKind[declaration.kind]);
// }
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
var interfaceDeclaration = declaration;
var members = interfaceDeclaration.members;
members.forEach(function (member) {
try {
var memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
// console.log('BEFORE: ', result);
result = result.replace(memberText, '');
}
}
catch (err) {
}
});
}
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
return result;
}
function format(text) {
var options = getDefaultOptions();
// Parse the source text
var sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
var edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(options), options);
// Apply the edits on the input code
return applyEdits(text, edits);
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
var result = text;
for (var i = edits.length - 1; i >= 0; i--) {
var change = edits[i];
var head = result.slice(0, change.span.start);
var tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
function getDefaultOptions() {
return {
indentSize: 4,
tabSize: 4,
newLineCharacter: '\r\n',
convertTabsToSpaces: true,
indentStyle: ts.IndentStyle.Block,
insertSpaceAfterCommaDelimiter: true,
insertSpaceAfterSemicolonInForStatements: true,
insertSpaceBeforeAndAfterBinaryOperators: true,
insertSpaceAfterKeywordsInControlFlowStatements: true,
insertSpaceAfterFunctionKeywordForAnonymousFunctions: false,
insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis: false,
insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets: false,
insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces: true,
placeOpenBraceOnNewLineForFunctions: false,
placeOpenBraceOnNewLineForControlBlocks: false,
};
}
}
function createReplacer(data) {
data = data || '';
var rawDirectives = data.split(';');
var directives = [];
rawDirectives.forEach(function (rawDirective) {
if (rawDirective.length === 0) {
return;
}
var pieces = rawDirective.split('=>');
var findStr = pieces[0];
var replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return function (str) {
for (var i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
function generateDeclarationFile(out, recipe) {
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m1[1];
var sourceFile_1 = getSourceFile(out, moduleId);
if (!sourceFile_1) {
return;
}
var replacer_1 = createReplacer(m1[2]);
var typeNames = m1[3].split(/,/);
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
var declaration = getTopLevelDeclaration(sourceFile_1, typeName);
if (!declaration) {
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration)));
});
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m2[1];
var sourceFile_2 = getSourceFile(out, moduleId);
if (!sourceFile_2) {
return;
}
var replacer_2 = createReplacer(m2[2]);
var typeNames = m2[3].split(/,/);
var typesToExcludeMap_1 = {};
var typesToExcludeArr_1 = [];
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap_1[typeName] = true;
typesToExcludeArr_1.push(typeName);
});
getAllTopLevelDeclarations(sourceFile_2).forEach(function (declaration) {
if (isDeclaration(declaration)) {
if (typesToExcludeMap_1[declaration.name.text]) {
return;
}
}
else {
// node is ts.VariableStatement
var nodeText = getNodeText(sourceFile_2, declaration);
for (var i = 0; i < typesToExcludeArr_1.length; i++) {
if (nodeText.indexOf(typesToExcludeArr_1[i]) >= 0) {
return;
}
}
}
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration)));
});
return;
}
result.push(line);
});
var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getFilesToWatch(out) {
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
var moduleId = m1[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
var moduleId = m2[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
});
return result;
}
exports.getFilesToWatch = getFilesToWatch;
function run(out) {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var result = generateDeclarationFile(out, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame: currentContent === result
};
}
exports.run = run;
function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
exports.complainErrors = complainErrors;
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var fs = require("fs");
var ts = require("typescript");
var path = require("path");
var tsfmt = require('../../tsfmt.json');
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[monaco.d.ts]'), message].concat(rest));
}
var SRC = path.join(__dirname, '../../src');
var OUT_ROOT = path.join(__dirname, '../../');
var RECIPE_PATH = path.join(__dirname, './monaco.d.ts.recipe');
var DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
var CURRENT_PROCESSING_RULE = '';
function logErr(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log(util.colors.red('[monaco.d.ts]'), 'WHILE HANDLING RULE: ', CURRENT_PROCESSING_RULE);
util.log.apply(util, [util.colors.red('[monaco.d.ts]'), message].concat(rest));
}
function moduleIdToPath(out, moduleId) {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
var SOURCE_FILE_MAP = {};
function getSourceFile(out, inputFiles, moduleId) {
if (!SOURCE_FILE_MAP[moduleId]) {
var filePath = path.normalize(moduleIdToPath(out, moduleId));
if (!inputFiles.hasOwnProperty(filePath)) {
logErr('CANNOT FIND FILE ' + filePath + '. YOU MIGHT NEED TO RESTART gulp');
return null;
}
var fileContents = inputFiles[filePath];
var sourceFile = ts.createSourceFile(filePath, fileContents, ts.ScriptTarget.ES5);
SOURCE_FILE_MAP[moduleId] = sourceFile;
}
return SOURCE_FILE_MAP[moduleId];
}
function isDeclaration(a) {
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
}
function visitTopLevelDeclarations(sourceFile, visitor) {
var stop = false;
var visit = function (node) {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(node);
}
// if (node.kind !== ts.SyntaxKind.SourceFile) {
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('FOUND TEXT IN NODE: ' + ts.SyntaxKind[node.kind]);
// console.log(getNodeText(sourceFile, node));
// }
// }
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile) {
var all = [];
visitTopLevelDeclarations(sourceFile, function (node) {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
var interfaceDeclaration = node;
var triviaStart = interfaceDeclaration.pos;
var triviaEnd = interfaceDeclaration.name.pos;
var triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
// // let nodeText = getNodeText(sourceFile, node);
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('TRIVIA: ', triviaText);
// }
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
var nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile, typeName) {
var result = null;
visitTopLevelDeclarations(sourceFile, function (node) {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
var result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
// // console.log(ts.SyntaxKind[declaration.kind]);
// }
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
var interfaceDeclaration = declaration;
var members = interfaceDeclaration.members;
members.forEach(function (member) {
try {
var memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
// console.log('BEFORE: ', result);
result = result.replace(memberText, '');
// console.log('AFTER: ', result);
}
}
catch (err) {
// life..
}
});
}
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
return result;
}
function format(text) {
// Parse the source text
var sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
var edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
var result = text;
for (var i = edits.length - 1; i >= 0; i--) {
var change = edits[i];
var head = result.slice(0, change.span.start);
var tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
}
function createReplacer(data) {
data = data || '';
var rawDirectives = data.split(';');
var directives = [];
rawDirectives.forEach(function (rawDirective) {
if (rawDirective.length === 0) {
return;
}
var pieces = rawDirective.split('=>');
var findStr = pieces[0];
var replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return function (str) {
for (var i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
function generateDeclarationFile(out, inputFiles, recipe) {
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m1[1];
var sourceFile_1 = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile_1) {
return;
}
var replacer_1 = createReplacer(m1[2]);
var typeNames = m1[3].split(/,/);
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
var declaration = getTopLevelDeclaration(sourceFile_1, typeName);
if (!declaration) {
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration)));
});
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m2[1];
var sourceFile_2 = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile_2) {
return;
}
var replacer_2 = createReplacer(m2[2]);
var typeNames = m2[3].split(/,/);
var typesToExcludeMap_1 = {};
var typesToExcludeArr_1 = [];
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap_1[typeName] = true;
typesToExcludeArr_1.push(typeName);
});
getAllTopLevelDeclarations(sourceFile_2).forEach(function (declaration) {
if (isDeclaration(declaration)) {
if (typesToExcludeMap_1[declaration.name.text]) {
return;
}
}
else {
// node is ts.VariableStatement
var nodeText = getNodeText(sourceFile_2, declaration);
for (var i = 0; i < typesToExcludeArr_1.length; i++) {
if (nodeText.indexOf(typesToExcludeArr_1[i]) >= 0) {
return;
}
}
}
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration)));
});
return;
}
result.push(line);
});
var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getFilesToWatch(out) {
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
var moduleId = m1[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
var moduleId = m2[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
});
return result;
}
exports.getFilesToWatch = getFilesToWatch;
function run(out, inputFiles) {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var result = generateDeclarationFile(out, inputFiles, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame: currentContent === result
};
}
exports.run = run;
function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
exports.complainErrors = complainErrors;

View file

@ -6,6 +6,7 @@
import fs = require('fs');
import ts = require('typescript');
import path = require('path');
const tsfmt = require('../../tsfmt.json');
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
@ -31,18 +32,16 @@ function moduleIdToPath(out:string, moduleId:string): string {
}
let SOURCE_FILE_MAP: {[moduleId:string]:ts.SourceFile;} = {};
function getSourceFile(out:string, moduleId:string): ts.SourceFile {
function getSourceFile(out:string, inputFiles: { [file: string]: string; }, moduleId:string): ts.SourceFile {
if (!SOURCE_FILE_MAP[moduleId]) {
let filePath = moduleIdToPath(out, moduleId);
let filePath = path.normalize(moduleIdToPath(out, moduleId));
let fileContents: string;
try {
fileContents = fs.readFileSync(filePath).toString();
} catch (err) {
logErr('CANNOT FIND FILE ' + filePath);
if (!inputFiles.hasOwnProperty(filePath)) {
logErr('CANNOT FIND FILE ' + filePath + '. YOU MIGHT NEED TO RESTART gulp');
return null;
}
let fileContents = inputFiles[filePath];
let sourceFile = ts.createSourceFile(filePath, fileContents, ts.ScriptTarget.ES5);
SOURCE_FILE_MAP[moduleId] = sourceFile;
@ -184,13 +183,12 @@ function getMassagedTopLevelDeclarationText(sourceFile:ts.SourceFile, declaratio
}
function format(text:string): string {
let options = getDefaultOptions();
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(options), options);
let edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
@ -214,27 +212,6 @@ function format(text:string): string {
}
return result;
}
function getDefaultOptions(): ts.FormatCodeSettings {
return {
indentSize: 4,
tabSize: 4,
newLineCharacter: '\r\n',
convertTabsToSpaces: true,
indentStyle: ts.IndentStyle.Block,
insertSpaceAfterCommaDelimiter: true,
insertSpaceAfterSemicolonInForStatements: true,
insertSpaceBeforeAndAfterBinaryOperators: true,
insertSpaceAfterKeywordsInControlFlowStatements: true,
insertSpaceAfterFunctionKeywordForAnonymousFunctions: false,
insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis: false,
insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets: false,
insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces: true,
placeOpenBraceOnNewLineForFunctions: false,
placeOpenBraceOnNewLineForControlBlocks: false,
};
}
}
function createReplacer(data:string): (str:string)=>string {
@ -262,7 +239,7 @@ function createReplacer(data:string): (str:string)=>string {
};
}
function generateDeclarationFile(out:string, recipe:string): string {
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe:string): string {
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
@ -273,7 +250,7 @@ function generateDeclarationFile(out:string, recipe:string): string {
if (m1) {
CURRENT_PROCESSING_RULE = line;
let moduleId = m1[1];
let sourceFile = getSourceFile(out, moduleId);
let sourceFile = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile) {
return;
}
@ -300,7 +277,7 @@ function generateDeclarationFile(out:string, recipe:string): string {
if (m2) {
CURRENT_PROCESSING_RULE = line;
let moduleId = m2[1];
let sourceFile = getSourceFile(out, moduleId);
let sourceFile = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile) {
return;
}
@ -383,12 +360,12 @@ export interface IMonacoDeclarationResult {
isTheSame: boolean;
}
export function run(out:string): IMonacoDeclarationResult {
export function run(out: string, inputFiles: { [file: string]: string; }): IMonacoDeclarationResult {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let result = generateDeclarationFile(out, recipe);
let result = generateDeclarationFile(out, inputFiles, recipe);
let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
@ -402,4 +379,4 @@ export function run(out:string): IMonacoDeclarationResult {
export function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
}

View file

@ -1,83 +1,82 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
declare module monaco {
interface Thenable<T> {
/**
* Attaches callbacks for the resolution and/or rejection of the Promise.
* @param onfulfilled The callback to execute when the Promise is resolved.
* @param onrejected The callback to execute when the Promise is rejected.
* @returns A Promise for the completion of which ever callback is executed.
*/
then<TResult>(onfulfilled?: (value: T) => TResult | Thenable<TResult>, onrejected?: (reason: any) => TResult | Thenable<TResult>): Thenable<TResult>;
then<TResult>(onfulfilled?: (value: T) => TResult | Thenable<TResult>, onrejected?: (reason: any) => void): Thenable<TResult>;
}
type Thenable<T> = PromiseLike<T>;
export interface IDisposable {
dispose(): void;
}
export interface IDisposable {
dispose(): void;
}
export interface IEvent<T> {
(listener: (e: T) => any, thisArg?: any): IDisposable;
}
export interface IEvent<T> {
(listener: (e: T) => any, thisArg?: any): IDisposable;
}
/**
* A helper that allows to emit and listen to typed events
*/
export class Emitter<T> {
constructor();
readonly event: Event<T>;
fire(event?: T): void;
dispose(): void;
}
/**
* A helper that allows to emit and listen to typed events
*/
export class Emitter<T> {
constructor();
readonly event: Event<T>;
fire(event?: T): void;
dispose(): void;
}
export enum Severity {
Ignore = 0,
Info = 1,
Warning = 2,
Error = 3,
}
export enum Severity {
Ignore = 0,
Info = 1,
Warning = 2,
Error = 3,
}
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, TPromise
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise
#include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken
#include(vs/base/common/uri): URI
#include(vs/editor/common/standalone/standaloneBase): KeyCode, KeyMod
#include(vs/base/common/keyCodes): Keybinding
#include(vs/base/common/htmlContent): MarkedString
#include(vs/base/common/htmlContent): IMarkdownString
#include(vs/base/browser/keyboardEvent): IKeyboardEvent
#include(vs/base/browser/mouseEvent): IMouseEvent
#include(vs/editor/common/editorCommon): IScrollEvent, IPosition, IRange, ISelection
#include(vs/editor/common/core/position): Position
#include(vs/editor/common/core/range): Range
#include(vs/editor/common/core/selection): Selection, SelectionDirection
#include(vs/editor/common/editorCommon): IScrollEvent
#include(vs/editor/common/core/position): IPosition, Position
#include(vs/editor/common/core/range): IRange, Range
#include(vs/editor/common/core/selection): ISelection, Selection, SelectionDirection
#include(vs/editor/common/core/token): Token
}
declare module monaco.editor {
#includeAll(vs/editor/browser/standalone/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/common/services/standaloneColorService): BuiltinTheme, ITheme
#include(vs/editor/common/modes/supports/tokenization): IThemeRule
#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/browser/standalone/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
#include(vs/editor/standalone/browser/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler {
(...args:any[]): void;
(...args:any[]): void;
}
#include(vs/platform/contextkey/common/contextkey): IContextKey
#include(vs/editor/browser/standalone/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarkerData
#include(vs/editor/browser/standalone/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarker, IMarkerData
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/base/common/scrollable): ScrollbarVisibility
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier): IPosition, IRange, ISelection, SelectionDirection, IScrollEvent
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>):
#include(vs/platform/theme/common/themeService): ThemeColor
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents):
#includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
}
declare module monaco.languages {
#includeAll(vs/editor/browser/standalone/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/common/modes/languageConfiguration):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.):
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint
#includeAll(vs/editor/common/modes/monarch/monarchTypes):
#includeAll(vs/editor/standalone/common/monarch/monarchTypes):
}
@ -85,4 +84,4 @@ declare module monaco.worker {
#includeAll(vs/editor/common/services/editorSimpleWorker;):
}
}

View file

@ -1,7 +1,7 @@
{
"name": "monaco-editor-core",
"private": true,
"version": "0.8.1",
"version": "0.9.0",
"description": "A browser based code editor",
"author": "Microsoft Corporation",
"license": "MIT",
@ -13,6 +13,10 @@
"url": "https://github.com/Microsoft/vscode/issues"
},
"devDependencies": {
"@types/minimist": "^1.2.0",
"@types/mocha": "^2.2.39",
"@types/semver": "^5.3.30",
"@types/sinon": "^1.16.34",
"debounce": "^1.0.0",
"eslint": "^3.4.0",
"event-stream": "^3.1.7",
@ -44,7 +48,7 @@
"sinon": "^1.17.2",
"source-map": "^0.4.4",
"tslint": "^4.3.1",
"typescript": "2.1.5",
"typescript": "2.5.2",
"typescript-formatter": "4.0.1",
"underscore": "^1.8.2",
"vinyl": "^0.4.5",

View file

@ -4,13 +4,15 @@
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const path = require('path');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function npmInstall(location) {
const result = cp.spawnSync(npm, ['install'], {
cwd: location ,
stdio: 'inherit'
});
function npmInstall(location, opts) {
opts = opts || {};
opts.cwd = location;
opts.stdio = 'inherit';
const result = cp.spawnSync(npm, ['install'], opts);
if (result.error || result.status !== 0) {
process.exit(1);
@ -31,7 +33,29 @@ const extensions = [
'javascript',
'css',
'html',
'git'
'git',
'gulp',
'grunt',
'jake',
'merge-conflict',
'emmet',
'npm',
'jake'
];
extensions.forEach(extension => npmInstall(`extensions/${extension}`));
extensions.forEach(extension => npmInstall(`extensions/${extension}`));
function npmInstallBuildDependencies() {
// make sure we install gulp watch for the system installed
// node, since that is the driver of gulp
const env = Object.assign({}, process.env);
delete env['npm_config_disturl'];
delete env['npm_config_target'];
delete env['npm_config_runtime'];
npmInstall(path.join(path.dirname(__dirname), 'lib', 'watch'), { env });
}
npmInstall(`build`); // node modules required for build
npmInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron

View file

@ -3,32 +3,13 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const path = require('path');
const cp = require('child_process');
if (process.env['npm_config_disturl'] !== 'https://atom.io/download/atom-shell') {
if (process.env['npm_config_disturl'] !== 'https://atom.io/download/electron') {
console.error("You can't use plain npm to install Code's dependencies.");
console.error(
/^win/.test(process.platform)
? "Please run '.\\scripts\\npm.bat install' instead."
: "Please run './scripts/npm.sh install' instead."
? "Please run '.\\scripts\\npm.bat install' instead."
: "Please run './scripts/npm.sh install' instead."
);
process.exit(1);
}
// make sure we install gulp watch for the system installed
// node, since that is the driver of gulp
if (process.platform !== 'win32') {
const env = Object.assign({}, process.env);
delete env['npm_config_disturl'];
delete env['npm_config_target'];
delete env['npm_config_runtime'];
cp.spawnSync('npm', ['install'], {
cwd: path.join(path.dirname(__dirname), 'lib', 'watch'),
stdio: 'inherit',
env
});
}

View file

@ -0,0 +1,73 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function updateGrammar(location) {
const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
cwd: location,
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const extensions = [
// 'bat' Grammar no longer available
'clojure',
'coffeescript',
'cpp',
'csharp',
'css',
'diff',
'docker',
'fsharp',
'gitsyntax',
'go',
'groovy',
'handlebars',
'hlsl',
'html',
'ini',
'java',
// 'javascript', updated through JavaScript
'json',
'less',
'lua',
'make',
'markdown',
'objective-c',
'perl',
'php',
// 'powershell', grammar not ready yet, @daviwil will ping when ready
'pug',
'python',
'r',
'razor',
'ruby',
'rust',
'scss',
'shaderlab',
'shellscript',
// 'sql', customized, PRs pending
'swift',
'typescript',
'vb',
'xml',
'yaml'
];
extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
// run integration tests
if (process.platform === 'win32') {
cp.spawn('.\\scripts\\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else {
cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
}

View file

@ -25,19 +25,28 @@ function getOptions(urlString) {
}
}
function download(url) {
return new Promise((c, e) => {
var content = '';
var request = https.get(getOptions(url), function (response) {
function download(url, redirectCount) {
return new Promise((c, e) => {
var content = '';
https.get(getOptions(url), function (response) {
response.on('data', function (data) {
content += data.toString();
}).on('end', function () {
let count = redirectCount || 0;
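// Follow up to five redirects (status 300-303 or 307) before resolving with whatever content was received.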
if (count < 5 && (response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307)) {
let location = response.headers['location'];
if (location) {
console.log("Redirected " + url + " to " + location);
download(location, count+1).then(c, e);
return;
}
}
c(content);
});
}).on('error', function (err) {
e(err.message);
});
});
});
}
function getCommitSha(repoId, repoPath) {
@ -46,14 +55,15 @@ function getCommitSha(repoId, repoPath) {
try {
let lastCommit = JSON.parse(content)[0];
return Promise.resolve({
commitSha : lastCommit.sha,
commitDate : lastCommit.commit.author.date
commitSha: lastCommit.sha,
commitDate: lastCommit.commit.author.date
});
} catch (e) {
console.error("Failed extracting the SHA: " + content);
return Promise.resolve(null);
}
}, function () {
console.err('Failed loading ' + commitInfo);
console.error('Failed loading ' + commitInfo);
return Promise.resolve(null);
});
}
@ -78,11 +88,23 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar) {
modifyGrammar(grammar);
}
return getCommitSha(repoId, repoPath).then(function (info) {
let result = {
information_for_contributors: [
'This file has been converted from https://github.com/' + repoId + '/blob/master/' + repoPath,
'If you want to provide a fix or improvement, please create a pull request against the original repository.',
'Once accepted there, we are happy to receive an update request.'
]
};
if (info) {
grammar.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
}
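// Copy the grammar's own keys after the metadata so the contributor notice stays at the top of the generated file.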
for (let key in grammar) {
result[key] = grammar[key];
}
try {
fs.writeFileSync(dest, JSON.stringify(grammar, null, '\t'));
fs.writeFileSync(dest, JSON.stringify(result, null, '\t'));
if (info) {
console.log('Updated ' + path.basename(dest) + ' to ' + repoId + '@' + info.commitSha.substr(0, 7) + ' (' + info.commitDate.substr(0, 10) + ')');
} else {
@ -97,7 +119,7 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar) {
}
if (path.basename(process.argv[1]) === 'update-grammar.js') {
for (var i = 3; i < process.argv.length; i+=2) {
exports.update(process.argv[2], process.argv[i], process.argv[i+1]);
for (var i = 3; i < process.argv.length; i += 2) {
exports.update(process.argv[2], process.argv[i], process.argv[i + 1]);
}
}

82
build/npm/update-theme.js Normal file
View file

@ -0,0 +1,82 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var path = require('path');
var fs = require('fs');
var plist = require('fast-plist');
var mappings = {
"background": ["editor.background"],
"foreground": ["editor.foreground"],
"hoverHighlight": ["editor.hoverHighlightBackground"],
"linkForeground": ["editorLink.foreground"],
"selection": ["editor.selectionBackground"],
"inactiveSelection": ["editor.inactiveSelectionBackground"],
"selectionHighlightColor": ["editor.selectionHighlightBackground"],
"wordHighlight": ["editor.wordHighlightBackground"],
"wordHighlightStrong": ["editor.wordHighlightStrongBackground"],
"findMatchHighlight": ["editor.findMatchHighlightBackground", "peekViewResult.matchHighlightBackground"],
"currentFindMatchHighlight": ["editor.findMatchBackground"],
"findRangeHighlight": ["editor.findRangeHighlightBackground"],
"referenceHighlight": ["peekViewEditor.matchHighlightBackground"],
"lineHighlight": ["editor.lineHighlightBackground"],
"rangeHighlight": ["editor.rangeHighlightBackground"],
"caret": ["editorCursor.foreground"],
"invisibles": ["editorWhitespace.foreground"],
"guide": ["editorIndentGuide.background"],
"ansiBlack": ["terminal.ansiBlack"], "ansiRed": ["terminal.ansiRed"], "ansiGreen": ["terminal.ansiGreen"], "ansiYellow": ["terminal.ansiYellow"],
"ansiBlue": ["terminal.ansiBlue"], "ansiMagenta": ["terminal.ansiMagenta"], "ansiCyan": ["terminal.ansiCyan"], "ansiWhite": ["terminal.ansiWhite"],
"ansiBrightBlack": ["terminal.ansiBrightBlack"], "ansiBrightRed": ["terminal.ansiBrightRed"], "ansiBrightGreen": ["terminal.ansiBrightGreen"],
"ansiBrightYellow": ["terminal.ansiBrightYellow"], "ansiBrightBlue": ["terminal.ansiBrightBlue"], "ansiBrightMagenta": ["terminal.ansiBrightMagenta"],
"ansiBrightCyan": ["terminal.ansiBrightCyan"], "ansiBrightWhite": ["terminal.ansiBrightWhite"]
};
exports.update = function (srcName, destName) {
try {
console.log('reading ', srcName);
let result = {};
let plistContent = fs.readFileSync(srcName).toString();
let theme = plist.parse(plistContent);
let settings = theme.settings;
if (Array.isArray(settings)) {
let colorMap = {};
for (let entry of settings) {
let scope = entry.scope;
if (scope) {
let parts = scope.split(',').map(p => p.trim());
if (parts.length > 1) {
entry.scope = parts;
}
} else {
var entrySettings = entry.settings;
for (let settingKey in entrySettings) {
let mapping = mappings[settingKey];
if (mapping) {
for (let newKey of mapping) {
colorMap[newKey] = entrySettings[settingKey];
}
if (settingKey !== 'foreground' && settingKey !== 'background') {
delete entrySettings[settingKey];
}
}
}
}
}
result.name = theme.name;
result.tokenColors = settings;
result.colors = colorMap;
}
fs.writeFileSync(destName, JSON.stringify(result, null, '\t'));
} catch (e) {
console.log(e);
}
};
if (path.basename(process.argv[1]) === 'update-theme.js') {
exports.update(process.argv[2], process.argv[3]);
}

24
build/package.json Normal file
View file

@ -0,0 +1,24 @@
{
"name": "code-oss-dev-build",
"version": "1.0.0",
"devDependencies": {
"@types/azure": "^0.9.18",
"@types/documentdb": "^1.10.1",
"@types/es6-collections": "^0.5.30",
"@types/es6-promise": "0.0.32",
"@types/mime": "0.0.29",
"@types/node": "^7.0.13",
"@types/xml2js": "^0.0.33",
"azure-storage": "^2.1.0",
"documentdb": "^1.11.0",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"typescript": "2.5.2",
"xml2js": "^0.4.17"
},
"scripts": {
"compile": "tsc",
"watch": "tsc --watch",
"postinstall": "npm run compile"
}
}

2
build/tfs/common/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
node_modules/
*.js

40
build/tfs/common/common.sh Executable file
View file

@ -0,0 +1,40 @@
#!/bin/bash
set -e
# set agent specific npm cache
if [ -n "$AGENT_WORKFOLDER" ]
then
export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
echo "Using npm cache: $npm_config_cache"
fi
SUMMARY="Task;Duration"$'\n'
step() {
START=$SECONDS
TASK=$1; shift
echo ""
echo "*****************************************************************************"
echo "Start: $TASK"
echo "*****************************************************************************"
"$@"
# Calculate total duration
TOTAL=$(echo "$SECONDS - $START" | bc)
M=$(echo "$TOTAL / 60" | bc)
S=$(echo "$TOTAL % 60" | bc)
DURATION="$(printf "%02d" $M):$(printf "%02d" $S)"
echo "*****************************************************************************"
echo "End: $TASK, Total: $DURATION"
echo "*****************************************************************************"
SUMMARY="$SUMMARY$TASK;$DURATION"$'\n'
}
done_steps() {
echo ""
echo "Build Summary"
echo "============="
echo "${SUMMARY}" | column -t -s';'
}
trap done_steps EXIT

View file

@ -0,0 +1,85 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { execSync } from 'child_process';
import { DocumentClient } from 'documentdb';
import * as azure from 'azure-storage';
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
}
function queueSigningRequest(quality: string, commit: string): Promise<void> {
const retryOperations = new azure.ExponentialRetryPolicyFilter();
const queueSvc = azure
.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(retryOperations);
queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();
const message = `${quality}/${commit}`;
return new Promise<void>((c, e) => queueSvc.createMessage('sign-darwin', message, err => err ? e(err) : c()));
}
function isBuildSigned(quality: string, commit: string): Promise<boolean> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
return new Promise<boolean>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return c(false); }
const [release] = results;
const assets: Asset[] = release.assets;
const isSigned = assets.some(a => a.platform === 'darwin' && a.type === 'archive');
c(isSigned);
});
});
}
async function waitForSignedBuild(quality: string, commit: string): Promise<void> {
let retries = 0;
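// Poll every 10 seconds, for up to 30 minutes (180 attempts), before giving up on the signed build.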
while (retries < 180) {
if (await isBuildSigned(quality, commit)) {
return;
}
await new Promise<void>(c => setTimeout(c, 10000));
retries++;
}
throw new Error('Timed out waiting for signed build');
}
async function main(quality: string): Promise<void> {
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
console.log(`Queueing signing request for '${quality}/${commit}'...`);
await queueSigningRequest(quality, commit);
console.log('Waiting on signed build...');
await waitForSignedBuild(quality, commit);
console.log('Found signed build!');
}
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});

View file

@ -0,0 +1,26 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function npmInstall(package: string, args: string[]): void {
const result = cp.spawnSync(npm, ['install', package, ...args], {
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
const [, , ...args] = process.argv;
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
npmInstall(url, args);
});

15
build/tfs/common/node.sh Executable file
View file

@ -0,0 +1,15 @@
#!/bin/bash
set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh
fi
# install node
NODE_VERSION=7.10.0
nvm install $NODE_VERSION
nvm use $NODE_VERSION

266
build/tfs/common/publish.ts Normal file
View file

@ -0,0 +1,266 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import { execSync } from 'child_process';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { DocumentClient, NewDocument } from 'documentdb';
if (process.argv.length < 6) {
console.error('Usage: node publish.js <product> <platform> <type> <name> <version> <commit> <is_update> <file>');
process.exit(-1);
}
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise<string>((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
sha256hash: string;
}
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
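// Re-query and replace the existing release document, retrying up to five attempts when DocumentDB reports a 409 conflict.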
function update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.assets = [
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
asset
];
if (isUpdate) {
release.updates[platform] = type;
}
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully updated.');
c();
});
});
});
}
return new Promise<void>((c, e) => {
client.createDocument(collection, release, err => {
if (err && err.code === 409) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully published.');
c();
});
});
}
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
interface PublishOptions {
'upload-only': boolean;
}
async function publish(commit: string, quality: string, platform: string, type: string, name: string, version: string, _isUpdate: string, file: string, opts: PublishOptions): Promise<void> {
const isUpdate = _isUpdate === 'true';
const queuedBy = process.env['BUILD_QUEUEDBY'];
const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
const isReleased = quality === 'insider'
&& /^master$|^refs\/heads\/master$/.test(sourceBranch)
&& /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy);
console.log('Publishing...');
console.log('Quality:', quality);
console.log('Platform:', platform);
console.log('Type:', type);
console.log('Name:', name);
console.log('Version:', version);
console.log('Commit:', commit);
console.log('Is Update:', isUpdate);
console.log('Is Released:', isReleased);
console.log('File:', file);
const stream = fs.createReadStream(file);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, mooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!mooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
if (promises.length === 0) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await Promise.all(promises);
console.log('Blobs successfully uploaded.');
const config = await getConfig(quality);
console.log('Quality config:', config);
const asset: Asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash
};
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: config.frozen ? false : isReleased,
sourceBranch,
queuedBy,
assets: [],
updates: {} as any
};
if (!opts['upload-only']) {
release.assets.push(asset);
if (isUpdate) {
release.updates[platform] = type;
}
}
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}
function main(): void {
const opts = minimist<PublishOptions>(process.argv.slice(2), {
boolean: ['upload-only']
});
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
console.error(err);
process.exit(1);
});
}
main();

41
build/tfs/darwin/build.sh Executable file
View file

@ -0,0 +1,41 @@
#!/bin/sh
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export VSCODE_MIXIN_PASSWORD="$1"
export AZURE_STORAGE_ACCESS_KEY="$2"
export AZURE_STORAGE_ACCESS_KEY_2="$3"
export MOONCAKE_STORAGE_ACCESS_KEY="$4"
export AZURE_DOCUMENTDB_MASTERKEY="$5"
VSO_PAT="$6"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install
step "Hygiene" \
npm run gulp -- hygiene
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Install distro dependencies" \
node build/tfs/common/installDistro.js
step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
# step "Create loader snapshot"
# node build/lib/snapshotLoader.js
step "Run unit tests" \
./scripts/test.sh --build --reporter dot
step "Run integration tests" \
./scripts/test-integration.sh
step "Publish release" \
./build/tfs/darwin/release.sh
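The step helper used throughout these scripts is sourced from build/tfs/common/common.sh, which is not part of the excerpt shown here. A minimal sketch of what such a helper might look like, assuming it only prints a banner and then runs the given command, is:
# assumed sketch only -- the real helper lives in build/tfs/common/common.sh
step() {
	local name="$1"; shift
	echo "*** $name"
	"$@"
}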

26
build/tfs/darwin/release.sh Executable file
View file

@ -0,0 +1,26 @@
#!/bin/sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm i)
REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
BUILD=$REPO/../VSCode-darwin
PACKAGEJSON=`ls $BUILD/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
rm -rf $UNSIGNEDZIP
(cd $BUILD && \
step "Create unsigned archive" \
zip -r -X -y $UNSIGNEDZIP *)
step "Upload unsigned archive" \
node build/tfs/common/publish.js --upload-only $VSCODE_QUALITY darwin archive-unsigned VSCode-darwin-$VSCODE_QUALITY-unsigned.zip $VERSION false $UNSIGNEDZIP
step "Sign build" \
node build/tfs/common/enqueue.js $VSCODE_QUALITY

28
build/tfs/darwin/smoketest.sh Executable file
View file

@ -0,0 +1,28 @@
#!/bin/sh
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export VSCODE_MIXIN_PASSWORD="$1"
VSO_PAT="$2"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Install distro dependencies" \
node build/tfs/common/installDistro.js
step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min
step "Run smoke test" \
pushd test/smoke
npm install
npm test -- --latest "$AGENT_BUILDDIRECTORY/VSCode-darwin/Visual Studio Code - Insiders.app/Contents/MacOS/Electron"
popd

1
build/tfs/linux/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
pat

3
build/tfs/linux/build-ia32.sh Executable file
View file

@ -0,0 +1,3 @@
#!/bin/bash
set -e
./build/tfs/linux/build.sh ia32 "$@"

3
build/tfs/linux/build-x64.sh Executable file
View file

@ -0,0 +1,3 @@
#!/bin/bash
set -e
./build/tfs/linux/build.sh x64 "$@"

43
build/tfs/linux/build.sh Executable file
View file

@ -0,0 +1,43 @@
#!/bin/bash
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export ARCH="$1"
export VSCODE_MIXIN_PASSWORD="$2"
export AZURE_STORAGE_ACCESS_KEY="$3"
export AZURE_STORAGE_ACCESS_KEY_2="$4"
export MOONCAKE_STORAGE_ACCESS_KEY="$5"
export AZURE_DOCUMENTDB_MASTERKEY="$6"
export LINUX_REPO_PASSWORD="$7"
VSO_PAT="$8"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install --arch=$ARCH --unsafe-perm
step "Hygiene" \
npm run gulp -- hygiene
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Get Electron" \
npm run gulp -- "electron-$ARCH"
step "Install distro dependencies" \
node build/tfs/common/installDistro.js --arch=$ARCH
step "Build minified" \
npm run gulp -- "vscode-linux-$ARCH-min"
# step "Create loader snapshot"
# node build/lib/snapshotLoader.js --arch=$ARCH
step "Run unit tests" \
./scripts/test.sh --build --reporter dot
step "Publish release" \
./build/tfs/linux/release.sh

View file

@ -0,0 +1,50 @@
FROM microsoft/vsts-agent:ubuntu-14.04-standard
MAINTAINER Joao Moreno <joao.moreno@microsoft.com>
ARG DEBIAN_FRONTEND=noninteractive
RUN dpkg --add-architecture i386
RUN apt-get update
# Dependencies
RUN apt-get install -y build-essential
RUN apt-get install -y gcc-multilib g++-multilib
RUN apt-get install -y git
RUN apt-get install -y zip
RUN apt-get install -y rpm
RUN apt-get install -y createrepo
RUN apt-get install -y python-gtk2
RUN apt-get install -y jq
RUN apt-get install -y xvfb
RUN apt-get install -y fakeroot
RUN apt-get install -y libgtk2.0-0:i386
RUN apt-get install -y libgconf-2-4:i386
RUN apt-get install -y libnss3:i386
RUN apt-get install -y libasound2:i386
RUN apt-get install -y libxtst6:i386
RUN apt-get install -y libfuse2
RUN apt-get install -y libnotify-bin
RUN apt-get install -y libnotify4:i386
RUN apt-get install -y libx11-dev:i386
RUN apt-get install -y libxkbfile-dev:i386
RUN apt-get install -y libxss1:i386
RUN apt-get install -y libx11-xcb-dev:i386
RUN apt-get install -y libgl1-mesa-glx:i386 libgl1-mesa-dri:i386
RUN apt-get install -y libxkbfile-dev
RUN apt-get install -y bc bsdmainutils
RUN apt-get install -y libgirepository-1.0-1:i386 gir1.2-glib-2.0:i386 gir1.2-secret-1:i386 libsecret-1-dev:i386
RUN apt-get install -y dpkg-dev:i386
# Xvfb
# Thanks https://medium.com/@griggheo/running-headless-selenium-webdriver-tests-in-docker-containers-342fdbabf756
ADD xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
# nvm
ENV NVM_DIR /usr/local/nvm
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash
# for libsecret
ENV PKG_CONFIG_PATH /usr/lib/i386-linux-gnu/pkgconfig
CMD (service xvfb start; export DISPLAY=:10; ./start.sh)

View file

@ -0,0 +1,15 @@
#!/bin/bash
if [ ! -f pat ]; then
echo "Error: file pat not found"
exit 1
fi
docker run \
-e VSTS_ACCOUNT="monacotools" \
-e VSTS_TOKEN="$(cat pat)" \
-e VSTS_AGENT="tb-lnx-ia32-local" \
-e VSTS_POOL="linux-ia32" \
-e VSTS_WORK="/var/vsts/work" \
--name "tb-lnx-ia32-local" \
-it joaomoreno/vscode-vso-agent-ia32:latest
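The pat file this script reads is the VSTS personal access token kept out of source control by build/tfs/linux/.gitignore above. A hypothetical way to provision it before starting the agent (the token value is a placeholder):
printf '%s' "<personal-access-token>" > pat   # placeholder value
chmod 600 pat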

View file

@ -0,0 +1,53 @@
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO
[ "${NETWORKING}" = "no" ] && exit 0
PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"
case "$1" in
start)
echo "Starting : X Virtual Frame Buffer "
$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
disown -ar
;;
stop)
echo "Shutting down : X Virtual Frame Buffer"
killproc $PROG
RETVAL=$?
[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb /var/run/Xvfb.pid
echo
;;
restart|reload)
$0 stop
$0 start
RETVAL=$?
;;
status)
status Xvfb
RETVAL=$?
;;
*)
echo $"Usage: $0 (start|stop|restart|reload|status)"
exit 1
esac
exit $RETVAL

86
build/tfs/linux/release.sh Executable file
View file

@ -0,0 +1,86 @@
#!/bin/bash
. ./scripts/env.sh
. ./build/tfs/common/common.sh
step "Build Debian package" \
npm run gulp -- "vscode-linux-$ARCH-build-deb"
step "Build RPM package" \
npm run gulp -- "vscode-linux-$ARCH-build-rpm"
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm install --unsafe-perm)
# Variables
PLATFORM_LINUX="linux-$ARCH"
PLATFORM_DEB="linux-deb-$ARCH"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
REPO="`pwd`"
ROOT="$REPO/.."
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && \
step "Create tar.gz archive" \
tar -czf $TARBALL_PATH $BUILDNAME)
step "Publish tar.gz archive" \
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
step "Publish Debian package" \
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
step "Publish RPM package" \
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH
if [ -z "$VSCODE_QUALITY" ]; then
echo "VSCODE_QUALITY is not set, skipping repo package publish"
else
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
# Get necessary information
pushd $REPO && COMMIT_HASH=$(git rev-parse HEAD) && popd
PACKAGE_NAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/_.*//g')"
DEB_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$DEB_FILENAME"
RPM_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$RPM_FILENAME"
PACKAGE_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\_.*$//g')"
# Write config files needed by API, use eval to force environment variable expansion
DIRNAME=$(dirname $(readlink -f $0))
pushd $DIRNAME
# Submit to apt repo
if [ "$DEB_ARCH" = "amd64" ]; then
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"sourceUrl\": \"$DEB_URL\" }' > apt-addpkg.json
echo "Submitting apt-addpkg.json:"
cat apt-addpkg.json
step "Publish to repositories" \
./repoapi_client.sh -config apt-config.json -addpkg apt-addpkg.json
fi
# Submit to yum repo (disabled as it's manual until signing is automated)
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
# eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"sourceUrl\": \"$RPM_URL\" }' > yum-addpkg.json
# echo "Submitting yum-addpkg.json:"
# cat yum-addpkg.json
# ./repoapi_client.sh -config yum-config.json -addpkg yum-addpkg.json
popd
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
fi
fi
fi

262
build/tfs/linux/repoapi_client.sh Executable file
View file

@ -0,0 +1,262 @@
#!/bin/bash -e
# This is a VERY basic script for Create/Delete operations on repos and packages
#
cmd=$1
urls=urls.txt
defaultPackageFile=new_package.json
defaultRepoFile=new_repo.json
function Bail
{
echo "ERROR: $@"
exit 1
}
function BailIfFileMissing {
file="$1"
if [ ! -f "$file" ]; then
Bail "File $file does not exist"
fi
}
function Usage {
echo "USAGE: Manage repos and packages in an apt repository"
echo "$0 -config FILENAME -listrepos | -listpkgs | -addrepo FILENAME | -addpkg FILENAME |"
echo "-addpkgs FILENAME | -check ID | -delrepo REPOID | -delpkg PKGID"
echo -e "\t-config FILENAME : JSON file containing API server name and creds"
echo -e "\t-listrepos : List repositories"
echo -e "\t-listpkgs [REGEX] : List packages, optionally filter by REGEX"
echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
echo -e "\t-addpkg FILENAME : Add package to repo using the specified JSON file"
echo -e "\t-addpkgs FILENAME : Add packages to repo using urls contained in FILENAME"
echo -e "\t-check ID : Check upload operation by ID"
echo -e "\t-delrepo REPOID : Delete the specified repo by ID"
echo -e "\t-delpkg PKGID : Delete the specified package by ID"
exit 1
}
function ParseFromJson {
if [ -z "$secretContents" ]; then
Bail "Unable to parse value because no JSON contents were specified"
elif [ -z "$1" ]; then
Bail "Unable to parse value from JSON because no key was specified"
fi
# Write value directly to stdout to be used by caller
echo $secretContents | jq "$1" | tr -d '"'
}
function ParseConfigFile {
configFile="$1"
if [ -z "$configFile" ]; then
echo "Must specify -config option"
Usage
fi
BailIfFileMissing "$configFile"
secretContents=$(cat "$configFile")
server=$(ParseFromJson .server)
protocol=$(ParseFromJson .protocol)
port=$(ParseFromJson .port)
repositoryId=$(ParseFromJson .repositoryId)
user=$(ParseFromJson .username)
pass=$(ParseFromJson .password)
baseurl="$protocol://$user:$pass@$server:$port"
}
# List Repositories
function ListRepositories
{
echo "Fetching repo list from $server..."
curl -k "$baseurl/v1/repositories" | sed 's/,/,\n/g' | sed 's/^"/\t"/g'
echo ""
}
# List packages, using $1 as a regex to filter results
function ListPackages
{
echo "Fetching package list from $server"
curl -k "$baseurl/v1/packages" | sed 's/{/\n{/g' | egrep "$1" | sed 's/,/,\n/g' | sed 's/^"/\t"/g'
echo ""
}
# Create a new Repo using the specified JSON file
function AddRepo
{
repoFile=$1
if [ -z $repoFile ]; then
Bail "Error: Must specify a JSON-formatted file. Reference $defaultRepoFile.template"
fi
if [ ! -f $repoFile ]; then
Bail "Error: Cannot create repo - $repoFile does not exist"
fi
packageUrl=$(grep "url" $repoFile | head -n 1 | awk '{print $2}' | tr -d ',')
echo "Creating new repo on $server [$packageUrl]"
curl -i -k "$baseurl/v1/repositories" --data @./$repoFile -H "Content-Type: application/json"
echo ""
}
# Upload a single package using the specified JSON file
function AddPackage
{
packageFile=$1
if [ -z $packageFile ]; then
Bail "Error: Must specify a JSON-formatted file. Reference $defaultPackageFile.template"
fi
if [ ! -f $packageFile ]; then
Bail "Error: Cannot add package - $packageFile does not exist"
fi
packageUrl=$(grep "sourceUrl" $packageFile | head -n 1 | awk '{print $2}')
echo "Adding package to $server [$packageUrl]"
curl -i -k "$baseurl/v1/packages" --data @./$packageFile -H "Content-Type: application/json"
echo ""
}
# Upload a single package by dynamically creating a JSON file using a provided URL
function AddPackageByUrl
{
url=$(echo "$1")
if [ -z "$url" ]; then
Bail "Unable to publish package because no URL was specified"
fi
tmpFile=$(mktemp)
tmpOut=$(mktemp)
if ! wget -q "$url" -O $tmpFile; then
rm -f $tmpFile $tmpOut
Bail "Unable to download URL $url"
elif dpkg -I $tmpFile > $tmpOut 2> /dev/null; then
echo "File is deb format"
pkgName=$(grep "^\s*Package:" $tmpOut | awk '{print $2}')
pkgVer=$(grep "^\s*Version:" $tmpOut | awk '{print $2}')
elif rpm -qpi $tmpFile > $tmpOut 2> /dev/null; then
echo "File is rpm format"
pkgName=$(egrep "^Name" $tmpOut | tr -d ':' | awk '{print $2}')
pkgVer=$(egrep "^Version" $tmpOut | tr -d ':' | awk '{print $2}')
else
rm -f $tmpFile $tmpOut
Bail "File is not a valid deb/rpm package $url"
fi
rm -f $tmpFile $tmpOut
if [ -z "$pkgName" ]; then
Bail "Unable to parse package name for $url"
elif [ -z "$pkgVer" ]; then
Bail "Unable to parse package version number for $url"
fi
# Create Package .json file
escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
cp $defaultPackageFile.template $defaultPackageFile
sed -i "s/PACKAGENAME/$pkgName/g" $defaultPackageFile
sed -i "s/PACKAGEVERSION/$pkgVer/g" $defaultPackageFile
sed -i "s/PACKAGEURL/$escapedUrl/g" $defaultPackageFile
sed -i "s/REPOSITORYID/$repositoryId/g" $defaultPackageFile
# Perform Upload
AddPackage $defaultPackageFile
# Cleanup
rm -f $defaultPackageFile
}
# Upload multiple packages by reading urls line-by-line from the specified file
function AddPackages
{
urlFile=$1
if [ -z $urlFile ]; then
Bail "Must specify a flat text file containing one or more URLs"
fi
if [ ! -f $urlFile ]; then
Bail "Cannot add packages. File $urlFile does not exist"
fi
for url in $(cat $urlFile); do
if [ -n "$url" ]; then
AddPackageByUrl "$url"
fi
sleep 5
done
}
# Check upload by ID
function CheckUpload {
id=$1
if [ -z "$id" ]; then
Bail "Must specify an ID"
fi
curl -k $baseurl/v1/packages/queue/$id
echo ""
}
# Delete the specified repo
function DeleteRepo
{
repoId=$1
if [ -z $repoId ]; then
Bail "Please specify repository ID. Run -listrepos for a list of IDs"
fi
curl -I -k -X DELETE "$baseurl/v1/repositories/$repoId"
}
# Delete the specified package
function DeletePackage
{
packageId=$1
if [ -z $packageId ]; then
Bail "Please specify package ID. Run -listpkgs for a list of IDs"
fi
echo Removing pkgId $packageId from repo $repositoryId
curl -I -k -X DELETE "$baseurl/v1/packages/$packageId"
}
# Parse params
# Not using getopts because this uses multi-char flags
operation=
while (( "$#" )); do
if [[ "$1" == "-config" ]]; then
shift
configFile="$1"
elif [[ "$1" == "-listrepos" ]]; then
operation=ListRepositories
elif [[ "$1" == "-listpkgs" ]]; then
operation=ListPackages
if [ -n "$2" ]; then
shift
operand="$1"
fi
elif [[ "$1" == "-addrepo" ]]; then
operation=AddRepo
shift
operand="$1"
elif [[ "$1" == "-addpkg" ]]; then
operation=AddPackage
shift
operand="$1"
elif [[ "$1" == "-addpkgs" ]]; then
operation=AddPackages
shift
operand="$1"
elif [[ "$1" == "-check" ]]; then
operation=CheckUpload
shift
operand="$1"
elif [[ "$1" == "-delrepo" ]]; then
operation=DeleteRepo
shift
operand="$1"
elif [[ "$1" == "-delpkg" ]]; then
operation=DeletePackage
shift
operand="$1"
else
Usage
fi
shift
done
echo "Performing $operation $operand"
# Parse config file
ParseConfigFile "$configFile"
# Exit if no operation was specified
if [ -z "$operation" ]; then
Usage
fi
$operation "$operand"
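As a quick usage reference, a read-only query against the repo API might look like the following; the config file shape matches the apt-config.json generated in build/tfs/linux/release.sh, and the regex is illustrative:
./build/tfs/linux/repoapi_client.sh -config apt-config.json -listpkgs 'code-insiders'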

View file

@ -0,0 +1,46 @@
#!/bin/bash
set -e
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export ARCH="$1"
export VSCODE_MIXIN_PASSWORD="$2"
VSO_PAT="$3"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install --arch=$ARCH --unsafe-perm
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Get Electron" \
npm run gulp -- "electron-$ARCH"
step "Install distro dependencies" \
node build/tfs/common/installDistro.js --arch=$ARCH
step "Build minified" \
npm run gulp -- "vscode-linux-$ARCH-min"
function configureEnvironment {
id -u testuser &>/dev/null || (useradd -m testuser; chpasswd <<< testuser:testpassword)
sudo -i -u testuser git config --global user.name "VS Code Agent"
sudo -i -u testuser git config --global user.email "monacotools@microsoft.com"
chown -R testuser $AGENT_BUILDDIRECTORY
}
function runTest {
pushd test/smoke
npm install
sudo -u testuser -H xvfb-run -a -s "-screen 0 1024x768x8" npm test -- --latest "$AGENT_BUILDDIRECTORY/VSCode-linux-ia32/code-insiders"
popd
}
step "Configure environment" configureEnvironment
step "Run smoke test" runTest

View file

@ -0,0 +1,43 @@
FROM microsoft/vsts-agent:ubuntu-14.04-standard
MAINTAINER Joao Moreno <joao.moreno@microsoft.com>
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update
# Dependencies
RUN apt-get install -y build-essential
RUN apt-get install -y gcc-multilib g++-multilib
RUN apt-get install -y git
RUN apt-get install -y dpkg-dev
RUN apt-get install -y zip
RUN apt-get install -y rpm
RUN apt-get install -y createrepo
RUN apt-get install -y python-gtk2
RUN apt-get install -y jq
RUN apt-get install -y xvfb
RUN apt-get install -y fakeroot
RUN apt-get install -y libgtk2.0-0
RUN apt-get install -y libgconf-2-4
RUN apt-get install -y libnss3
RUN apt-get install -y libasound2
RUN apt-get install -y libxtst6
RUN apt-get install -y libfuse2
RUN apt-get install -y libnotify-bin
RUN apt-get install -y libx11-dev
RUN apt-get install -y libxss1
RUN apt-get install -y libx11-xcb-dev
RUN apt-get install -y libxkbfile-dev
RUN apt-get install -y bc bsdmainutils
RUN apt-get install -y libsecret-1-dev
# Xvfb
# Thanks https://medium.com/@griggheo/running-headless-selenium-webdriver-tests-in-docker-containers-342fdbabf756
ADD xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
# nvm
ENV NVM_DIR /usr/local/nvm
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash
CMD (service xvfb start; export DISPLAY=:10; ./start.sh)

View file

@ -0,0 +1,15 @@
#!/bin/bash
if [ ! -f pat ]; then
echo "Error: file pat not found"
exit 1
fi
docker run \
-e VSTS_ACCOUNT="monacotools" \
-e VSTS_TOKEN="$(cat pat)" \
-e VSTS_AGENT="tb-lnx-x64-local" \
-e VSTS_POOL="linux-x64" \
-e VSTS_WORK="/var/vsts/work" \
--name "tb-lnx-x64-local" \
-it joaomoreno/vscode-vso-agent-x64:latest

View file

@ -0,0 +1,53 @@
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO
[ "${NETWORKING}" = "no" ] && exit 0
PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"
case "$1" in
start)
echo "Starting : X Virtual Frame Buffer "
$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
disown -ar
;;
stop)
echo "Shutting down : X Virtual Frame Buffer"
killproc $PROG
RETVAL=$?
[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb /var/run/Xvfb.pid
echo
;;
restart|reload)
$0 stop
$0 start
RETVAL=$?
;;
status)
status Xvfb
RETVAL=$?
;;
*)
echo $"Usage: $0 (start|stop|restart|reload|status)"
exit 1
esac
exit $RETVAL

Some files were not shown because too many files have changed in this diff